##// END OF EJS Templates
merge: merged default changes into new-ui
marcink -
r3638:c1c37b0b merge new-ui
parent child Browse files
Show More
@@ -0,0 +1,41 b''
1 |RCE| 4.16.2 |RNS|
2 ------------------
3
4 Release Date
5 ^^^^^^^^^^^^
6
7 - 2019-04-02
8
9
10 New Features
11 ^^^^^^^^^^^^
12
13
14
15 General
16 ^^^^^^^
17
18
19
20 Security
21 ^^^^^^^^
22
23
24
25 Performance
26 ^^^^^^^^^^^
27
28
29
30 Fixes
31 ^^^^^
32
33 - Integrations: fixed missing template variable for fork reference checks.
34 - Permissions: fixed server error when showing permissions for user groups.
35 - Pull requests: fixed a bug in removal of multiple reviewers at once.
36
37
38 Upgrade notes
39 ^^^^^^^^^^^^^
40
41 - Scheduled release addressing problems in 4.16.X releases.
@@ -0,0 +1,54 b''
1 # -*- coding: utf-8 -*-
2
3 import logging
4
5 from alembic.migration import MigrationContext
6 from alembic.operations import Operations
7 from sqlalchemy import String, Column
8 from sqlalchemy.sql import text
9
10 from rhodecode.lib.dbmigrate.versions import _reset_base
11 from rhodecode.model import meta, init_model_encryption
12 from rhodecode.model.db import RepoGroup
13
14
15 log = logging.getLogger(__name__)
16
17
def upgrade(migrate_engine):
    """
    Add the nullable ``repo_group_name_hash`` column to the repo groups
    table and backfill it for every existing repo group.

    Don't create your own engine; bind migrate_engine to your metadata.
    """
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_16_0_2

    init_model_encryption(db_4_16_0_2)

    migration_ctx = MigrationContext.configure(migrate_engine.connect())
    alembic_ops = Operations(migration_ctx)

    groups_table = db_4_16_0_2.RepoGroup.__table__

    # Added as nullable first; a follow-up migration flips it to NOT NULL
    # once every row has been backfilled below.
    with alembic_ops.batch_alter_table(groups_table.name) as batch:
        hash_column = Column(
            "repo_group_name_hash", String(1024), nullable=True, unique=False)
        batch.add_column(hash_column)

    _generate_repo_group_name_hashes(db_4_16_0_2, alembic_ops, meta.Session)
38
39
def downgrade(migrate_engine):
    """No-op: this schema migration is intentionally not reversible."""
    pass
42
43
def _generate_repo_group_name_hashes(models, op, session):
    """
    Backfill ``repo_group_name_hash`` for every existing repo group.

    :param models: schema module (db_4_16_0_2) providing the RepoGroup model.
    :param op: alembic Operations object used to execute the raw UPDATEs.
    :param session: session factory; called and committed once at the end.
    """
    repo_groups = models.RepoGroup.get_all()
    for repo_group in repo_groups:
        # Was a bare print(); migrations should log, not write to stdout.
        # Lazy %-args keep formatting cost out of the non-debug path.
        log.debug('Generating name hash for repo group %s',
                  repo_group.group_name)
        hash_ = RepoGroup.hash_repo_group_name(repo_group.group_name)
        params = {'hash': hash_, 'id': repo_group.group_id}
        # Bound parameters; never interpolate values into the SQL string.
        query = text(
            'UPDATE groups SET repo_group_name_hash = :hash'
            ' WHERE group_id = :id').bindparams(**params)
        op.execute(query)
    session().commit()
@@ -0,0 +1,39 b''
1 # -*- coding: utf-8 -*-
2
3 import logging
4
5 from alembic.migration import MigrationContext
6 from alembic.operations import Operations
7
8 from rhodecode.lib.dbmigrate.versions import _reset_base
9 from rhodecode.model import init_model_encryption
10
11
12 log = logging.getLogger(__name__)
13
14
def upgrade(migrate_engine):
    """
    Tighten ``repo_group_name_hash`` to NOT NULL now that the previous
    migration has populated it for every repo group.

    Don't create your own engine; bind migrate_engine to your metadata.
    """
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_16_0_2

    init_model_encryption(db_4_16_0_2)

    migration_ctx = MigrationContext.configure(migrate_engine.connect())
    alembic_ops = Operations(migration_ctx)

    groups_table = db_4_16_0_2.RepoGroup.__table__

    with alembic_ops.batch_alter_table(groups_table.name) as batch:
        batch.alter_column("repo_group_name_hash", nullable=False)
32
33
def downgrade(migrate_engine):
    """No-op: this schema migration is intentionally not reversible."""
    pass
36
37
38 def _generate_repo_group_name_hashes(models, op, session):
39 pass
@@ -1,53 +1,54 b''
1 1 1bd3e92b7e2e2d2024152b34bb88dff1db544a71 v4.0.0
2 2 170c5398320ea6cddd50955e88d408794c21d43a v4.0.1
3 3 c3fe200198f5aa34cf2e4066df2881a9cefe3704 v4.1.0
4 4 7fd5c850745e2ea821fb4406af5f4bff9b0a7526 v4.1.1
5 5 41c87da28a179953df86061d817bc35533c66dd2 v4.1.2
6 6 baaf9f5bcea3bae0ef12ae20c8b270482e62abb6 v4.2.0
7 7 32a70c7e56844a825f61df496ee5eaf8c3c4e189 v4.2.1
8 8 fa695cdb411d294679ac081d595ac654e5613b03 v4.3.0
9 9 0e4dc11b58cad833c513fe17bac39e6850edf959 v4.3.1
10 10 8a876f48f5cb1d018b837db28ff928500cb32cfb v4.4.0
11 11 8dd86b410b1aac086ffdfc524ef300f896af5047 v4.4.1
12 12 d2514226abc8d3b4f6fb57765f47d1b6fb360a05 v4.4.2
13 13 27d783325930af6dad2741476c0d0b1b7c8415c2 v4.5.0
14 14 7f2016f352abcbdba4a19d4039c386e9629449da v4.5.1
15 15 416fec799314c70a5c780fb28b3357b08869333a v4.5.2
16 16 27c3b85fafc83143e6678fbc3da69e1615bcac55 v4.6.0
17 17 5ad13deb9118c2a5243d4032d4d9cc174e5872db v4.6.1
18 18 2be921e01fa24bb102696ada596f87464c3666f6 v4.7.0
19 19 7198bdec29c2872c974431d55200d0398354cdb1 v4.7.1
20 20 bd1c8d230fe741c2dfd7100a0ef39fd0774fd581 v4.7.2
21 21 9731914f89765d9628dc4dddc84bc9402aa124c8 v4.8.0
22 22 c5a2b7d0e4bbdebc4a62d7b624befe375207b659 v4.9.0
23 23 d9aa3b27ac9f7e78359775c75fedf7bfece232f1 v4.9.1
24 24 4ba4d74981cec5d6b28b158f875a2540952c2f74 v4.10.0
25 25 0a6821cbd6b0b3c21503002f88800679fa35ab63 v4.10.1
26 26 434ad90ec8d621f4416074b84f6e9ce03964defb v4.10.2
27 27 68baee10e698da2724c6e0f698c03a6abb993bf2 v4.10.3
28 28 00821d3afd1dce3f4767cc353f84a17f7d5218a1 v4.10.4
29 29 22f6744ad8cc274311825f63f953e4dee2ea5cb9 v4.10.5
30 30 96eb24bea2f5f9258775245e3f09f6fa0a4dda01 v4.10.6
31 31 3121217a812c956d7dd5a5875821bd73e8002a32 v4.11.0
32 32 fa98b454715ac5b912f39e84af54345909a2a805 v4.11.1
33 33 3982abcfdcc229a723cebe52d3a9bcff10bba08e v4.11.2
34 34 33195f145db9172f0a8f1487e09207178a6ab065 v4.11.3
35 35 194c74f33e32bbae6fc4d71ec5a999cff3c13605 v4.11.4
36 36 8fbd8b0c3ddc2fa4ac9e4ca16942a03eb593df2d v4.11.5
37 37 f0609aa5d5d05a1ca2f97c3995542236131c9d8a v4.11.6
38 38 b5b30547d90d2e088472a70c84878f429ffbf40d v4.12.0
39 39 9072253aa8894d20c00b4a43dc61c2168c1eff94 v4.12.1
40 40 6a517543ea9ef9987d74371bd2a315eb0b232dc9 v4.12.2
41 41 7fc0731b024c3114be87865eda7ab621cc957e32 v4.12.3
42 42 6d531c0b068c6eda62dddceedc9f845ecb6feb6f v4.12.4
43 43 3d6bf2d81b1564830eb5e83396110d2a9a93eb1e v4.13.0
44 44 5468fc89e708bd90e413cd0d54350017abbdbc0e v4.13.1
45 45 610d621550521c314ee97b3d43473ac0bcf06fb8 v4.13.2
46 46 7dc62c090881fb5d03268141e71e0940d7c3295d v4.13.3
47 47 9151328c1c46b72ba6f00d7640d9141e75aa1ca2 v4.14.0
48 48 a47eeac5dfa41fa6779d90452affba4091c3ade8 v4.14.1
49 49 4b34ce0d2c3c10510626b3b65044939bb7a2cddf v4.15.0
50 50 14502561d22e6b70613674cd675ae9a604b7989f v4.15.1
51 51 4aaa40b605b01af78a9f6882eca561c54b525ef0 v4.15.2
52 52 797744642eca86640ed20bef2cd77445780abaec v4.16.0
53 53 6c3452c7c25ed35ff269690929e11960ed6ad7d3 v4.16.1
54 5d8057df561c4b6b81b6401aed7d2f911e6e77f7 v4.16.2
@@ -1,130 +1,131 b''
1 1 .. _rhodecode-release-notes-ref:
2 2
3 3 Release Notes
4 4 =============
5 5
6 6 |RCE| 4.x Versions
7 7 ------------------
8 8
9 9 .. toctree::
10 10 :maxdepth: 1
11 11
12 release-notes-4.16.2.rst
12 13 release-notes-4.16.1.rst
13 14 release-notes-4.16.0.rst
14 15 release-notes-4.15.2.rst
15 16 release-notes-4.15.1.rst
16 17 release-notes-4.15.0.rst
17 18 release-notes-4.14.1.rst
18 19 release-notes-4.14.0.rst
19 20 release-notes-4.13.3.rst
20 21 release-notes-4.13.2.rst
21 22 release-notes-4.13.1.rst
22 23 release-notes-4.13.0.rst
23 24 release-notes-4.12.4.rst
24 25 release-notes-4.12.3.rst
25 26 release-notes-4.12.2.rst
26 27 release-notes-4.12.1.rst
27 28 release-notes-4.12.0.rst
28 29 release-notes-4.11.6.rst
29 30 release-notes-4.11.5.rst
30 31 release-notes-4.11.4.rst
31 32 release-notes-4.11.3.rst
32 33 release-notes-4.11.2.rst
33 34 release-notes-4.11.1.rst
34 35 release-notes-4.11.0.rst
35 36 release-notes-4.10.6.rst
36 37 release-notes-4.10.5.rst
37 38 release-notes-4.10.4.rst
38 39 release-notes-4.10.3.rst
39 40 release-notes-4.10.2.rst
40 41 release-notes-4.10.1.rst
41 42 release-notes-4.10.0.rst
42 43 release-notes-4.9.1.rst
43 44 release-notes-4.9.0.rst
44 45 release-notes-4.8.0.rst
45 46 release-notes-4.7.2.rst
46 47 release-notes-4.7.1.rst
47 48 release-notes-4.7.0.rst
48 49 release-notes-4.6.1.rst
49 50 release-notes-4.6.0.rst
50 51 release-notes-4.5.2.rst
51 52 release-notes-4.5.1.rst
52 53 release-notes-4.5.0.rst
53 54 release-notes-4.4.2.rst
54 55 release-notes-4.4.1.rst
55 56 release-notes-4.4.0.rst
56 57 release-notes-4.3.1.rst
57 58 release-notes-4.3.0.rst
58 59 release-notes-4.2.1.rst
59 60 release-notes-4.2.0.rst
60 61 release-notes-4.1.2.rst
61 62 release-notes-4.1.1.rst
62 63 release-notes-4.1.0.rst
63 64 release-notes-4.0.1.rst
64 65 release-notes-4.0.0.rst
65 66
66 67 |RCE| 3.x Versions
67 68 ------------------
68 69
69 70 .. toctree::
70 71 :maxdepth: 1
71 72
72 73 release-notes-3.8.4.rst
73 74 release-notes-3.8.3.rst
74 75 release-notes-3.8.2.rst
75 76 release-notes-3.8.1.rst
76 77 release-notes-3.8.0.rst
77 78 release-notes-3.7.1.rst
78 79 release-notes-3.7.0.rst
79 80 release-notes-3.6.1.rst
80 81 release-notes-3.6.0.rst
81 82 release-notes-3.5.2.rst
82 83 release-notes-3.5.1.rst
83 84 release-notes-3.5.0.rst
84 85 release-notes-3.4.1.rst
85 86 release-notes-3.4.0.rst
86 87 release-notes-3.3.4.rst
87 88 release-notes-3.3.3.rst
88 89 release-notes-3.3.2.rst
89 90 release-notes-3.3.1.rst
90 91 release-notes-3.3.0.rst
91 92 release-notes-3.2.3.rst
92 93 release-notes-3.2.2.rst
93 94 release-notes-3.2.1.rst
94 95 release-notes-3.2.0.rst
95 96 release-notes-3.1.1.rst
96 97 release-notes-3.1.0.rst
97 98 release-notes-3.0.2.rst
98 99 release-notes-3.0.1.rst
99 100 release-notes-3.0.0.rst
100 101
101 102 |RCE| 2.x Versions
102 103 ------------------
103 104
104 105 .. toctree::
105 106 :maxdepth: 1
106 107
107 108 release-notes-2.2.8.rst
108 109 release-notes-2.2.7.rst
109 110 release-notes-2.2.6.rst
110 111 release-notes-2.2.5.rst
111 112 release-notes-2.2.4.rst
112 113 release-notes-2.2.3.rst
113 114 release-notes-2.2.2.rst
114 115 release-notes-2.2.1.rst
115 116 release-notes-2.2.0.rst
116 117 release-notes-2.1.0.rst
117 118 release-notes-2.0.2.rst
118 119 release-notes-2.0.1.rst
119 120 release-notes-2.0.0.rst
120 121
121 122 |RCE| 1.x Versions
122 123 ------------------
123 124
124 125 .. toctree::
125 126 :maxdepth: 1
126 127
127 128 release-notes-1.7.2.rst
128 129 release-notes-1.7.1.rst
129 130 release-notes-1.7.0.rst
130 131 release-notes-1.6.0.rst
@@ -1,2361 +1,2361 b''
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "alembic" = super.buildPythonPackage {
8 name = "alembic-1.0.8";
8 name = "alembic-1.0.9";
9 9 doCheck = false;
10 10 propagatedBuildInputs = [
11 11 self."sqlalchemy"
12 12 self."mako"
13 13 self."python-editor"
14 14 self."python-dateutil"
15 15 ];
16 16 src = fetchurl {
17 url = "https://files.pythonhosted.org/packages/d6/bb/ec1e21f2e303689ad2170eb47fc67df9ad4199ade6759a99474c4d3535c8/alembic-1.0.8.tar.gz";
18 sha256 = "1s34i1j0dsxbflxligwhnkf37a5hvcshsv8ibkcfdjf03ph42pah";
17 url = "https://files.pythonhosted.org/packages/fc/42/8729e2491fa9b8eae160d1cbb429f61712bfc2d779816488c25cfdabf7b8/alembic-1.0.9.tar.gz";
18 sha256 = "0a88rwp7fp0y8ykczj82ivr4ww1kiflcvb882lgfl9azm8csdfa0";
19 19 };
20 20 meta = {
21 21 license = [ pkgs.lib.licenses.mit ];
22 22 };
23 23 };
24 24 "amqp" = super.buildPythonPackage {
25 25 name = "amqp-2.3.1";
26 26 doCheck = false;
27 27 propagatedBuildInputs = [
28 28 self."vine"
29 29 ];
30 30 src = fetchurl {
31 31 url = "https://files.pythonhosted.org/packages/1b/32/242ff76cd802766f11c89c72f3389b5c8de4bdfbab406137b90c5fae8b05/amqp-2.3.1.tar.gz";
32 32 sha256 = "0wlfnvhmfrn7c8qif2jyvsm63ibdxp02ss564qwrvqfhz0di72s0";
33 33 };
34 34 meta = {
35 35 license = [ pkgs.lib.licenses.bsdOriginal ];
36 36 };
37 37 };
38 38 "appenlight-client" = super.buildPythonPackage {
39 39 name = "appenlight-client-0.6.26";
40 40 doCheck = false;
41 41 propagatedBuildInputs = [
42 42 self."webob"
43 43 self."requests"
44 44 self."six"
45 45 ];
46 46 src = fetchurl {
47 47 url = "https://files.pythonhosted.org/packages/2e/56/418fc10379b96e795ee39a15e69a730c222818af04c3821fa354eaa859ec/appenlight_client-0.6.26.tar.gz";
48 48 sha256 = "0s9xw3sb8s3pk73k78nnq4jil3q4mk6bczfa1fmgfx61kdxl2712";
49 49 };
50 50 meta = {
51 51 license = [ pkgs.lib.licenses.bsdOriginal ];
52 52 };
53 53 };
54 54 "asn1crypto" = super.buildPythonPackage {
55 55 name = "asn1crypto-0.24.0";
56 56 doCheck = false;
57 57 src = fetchurl {
58 58 url = "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz";
59 59 sha256 = "0jaf8rf9dx1lf23xfv2cdd5h52f1qr3w8k63985bc35g3d220p4x";
60 60 };
61 61 meta = {
62 62 license = [ pkgs.lib.licenses.mit ];
63 63 };
64 64 };
65 65 "atomicwrites" = super.buildPythonPackage {
66 66 name = "atomicwrites-1.2.1";
67 67 doCheck = false;
68 68 src = fetchurl {
69 69 url = "https://files.pythonhosted.org/packages/ac/ed/a311712ef6b4355035489f665e63e1a73f9eb371929e3c98e5efd451069e/atomicwrites-1.2.1.tar.gz";
70 70 sha256 = "1vmkbw9j0qammwxbxycrs39gvdg4lc2d4lk98kwf8ag2manyi6pc";
71 71 };
72 72 meta = {
73 73 license = [ pkgs.lib.licenses.mit ];
74 74 };
75 75 };
76 76 "attrs" = super.buildPythonPackage {
77 77 name = "attrs-18.2.0";
78 78 doCheck = false;
79 79 src = fetchurl {
80 80 url = "https://files.pythonhosted.org/packages/0f/9e/26b1d194aab960063b266170e53c39f73ea0d0d3f5ce23313e0ec8ee9bdf/attrs-18.2.0.tar.gz";
81 81 sha256 = "0s9ydh058wmmf5v391pym877x4ahxg45dw6a0w4c7s5wgpigdjqh";
82 82 };
83 83 meta = {
84 84 license = [ pkgs.lib.licenses.mit ];
85 85 };
86 86 };
87 87 "authomatic" = super.buildPythonPackage {
88 88 name = "authomatic-0.1.0.post1";
89 89 doCheck = false;
90 90 src = fetchurl {
91 91 url = "https://code.rhodecode.com/upstream/authomatic/archive/90a9ce60cc405ae8a2bf5c3713acd5d78579a04e.tar.gz?md5=3c68720a1322b25254009518d1ff6801";
92 92 sha256 = "1cgk0a86sbsjbri06gf5z5l4npwkjdxw6fdnwl4vvfmxs2sx9yxw";
93 93 };
94 94 meta = {
95 95 license = [ pkgs.lib.licenses.mit ];
96 96 };
97 97 };
98 98 "babel" = super.buildPythonPackage {
99 99 name = "babel-1.3";
100 100 doCheck = false;
101 101 propagatedBuildInputs = [
102 102 self."pytz"
103 103 ];
104 104 src = fetchurl {
105 105 url = "https://files.pythonhosted.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
106 106 sha256 = "0bnin777lc53nxd1hp3apq410jj5wx92n08h7h4izpl4f4sx00lz";
107 107 };
108 108 meta = {
109 109 license = [ pkgs.lib.licenses.bsdOriginal ];
110 110 };
111 111 };
112 112 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
113 113 name = "backports.shutil-get-terminal-size-1.0.0";
114 114 doCheck = false;
115 115 src = fetchurl {
116 116 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
117 117 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
118 118 };
119 119 meta = {
120 120 license = [ pkgs.lib.licenses.mit ];
121 121 };
122 122 };
123 123 "beaker" = super.buildPythonPackage {
124 124 name = "beaker-1.9.1";
125 125 doCheck = false;
126 126 propagatedBuildInputs = [
127 127 self."funcsigs"
128 128 ];
129 129 src = fetchurl {
130 130 url = "https://files.pythonhosted.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
131 131 sha256 = "08arsn61r255lhz6hcpn2lsiqpg30clla805ysx06wmbhvb6w9rj";
132 132 };
133 133 meta = {
134 134 license = [ pkgs.lib.licenses.bsdOriginal ];
135 135 };
136 136 };
137 137 "beautifulsoup4" = super.buildPythonPackage {
138 138 name = "beautifulsoup4-4.6.3";
139 139 doCheck = false;
140 140 src = fetchurl {
141 141 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
142 142 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
143 143 };
144 144 meta = {
145 145 license = [ pkgs.lib.licenses.mit ];
146 146 };
147 147 };
148 148 "billiard" = super.buildPythonPackage {
149 149 name = "billiard-3.5.0.3";
150 150 doCheck = false;
151 151 src = fetchurl {
152 152 url = "https://files.pythonhosted.org/packages/39/ac/f5571210cca2e4f4532e38aaff242f26c8654c5e2436bee966c230647ccc/billiard-3.5.0.3.tar.gz";
153 153 sha256 = "1riwiiwgb141151md4ykx49qrz749akj5k8g290ji9bsqjyj4yqx";
154 154 };
155 155 meta = {
156 156 license = [ pkgs.lib.licenses.bsdOriginal ];
157 157 };
158 158 };
159 159 "bleach" = super.buildPythonPackage {
160 160 name = "bleach-3.1.0";
161 161 doCheck = false;
162 162 propagatedBuildInputs = [
163 163 self."six"
164 164 self."webencodings"
165 165 ];
166 166 src = fetchurl {
167 167 url = "https://files.pythonhosted.org/packages/78/5a/0df03e8735cd9c75167528299c738702437589b9c71a849489d00ffa82e8/bleach-3.1.0.tar.gz";
168 168 sha256 = "1yhrgrhkln8bd6gn3imj69g1h4xqah9gaz9q26crqr6gmmvpzprz";
169 169 };
170 170 meta = {
171 171 license = [ pkgs.lib.licenses.asl20 ];
172 172 };
173 173 };
174 174 "bumpversion" = super.buildPythonPackage {
175 175 name = "bumpversion-0.5.3";
176 176 doCheck = false;
177 177 src = fetchurl {
178 178 url = "https://files.pythonhosted.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
179 179 sha256 = "0zn7694yfipxg35ikkfh7kvgl2fissha3dnqad2c5bvsvmrwhi37";
180 180 };
181 181 meta = {
182 182 license = [ pkgs.lib.licenses.mit ];
183 183 };
184 184 };
185 185 "celery" = super.buildPythonPackage {
186 186 name = "celery-4.1.1";
187 187 doCheck = false;
188 188 propagatedBuildInputs = [
189 189 self."pytz"
190 190 self."billiard"
191 191 self."kombu"
192 192 ];
193 193 src = fetchurl {
194 194 url = "https://files.pythonhosted.org/packages/e9/cf/a4c0597effca20c57eb586324e41d1180bc8f13a933da41e0646cff69f02/celery-4.1.1.tar.gz";
195 195 sha256 = "1xbir4vw42n2ir9lanhwl7w69zpmj7lbi66fxm2b7pyvkcss7wni";
196 196 };
197 197 meta = {
198 198 license = [ pkgs.lib.licenses.bsdOriginal ];
199 199 };
200 200 };
201 201 "cffi" = super.buildPythonPackage {
202 202 name = "cffi-1.12.2";
203 203 doCheck = false;
204 204 propagatedBuildInputs = [
205 205 self."pycparser"
206 206 ];
207 207 src = fetchurl {
208 208 url = "https://files.pythonhosted.org/packages/64/7c/27367b38e6cc3e1f49f193deb761fe75cda9f95da37b67b422e62281fcac/cffi-1.12.2.tar.gz";
209 209 sha256 = "19qfks2djya8vix95bmg3xzipjb8w9b8mbj4j5k2hqkc8j58f4z1";
210 210 };
211 211 meta = {
212 212 license = [ pkgs.lib.licenses.mit ];
213 213 };
214 214 };
215 215 "chameleon" = super.buildPythonPackage {
216 216 name = "chameleon-2.24";
217 217 doCheck = false;
218 218 src = fetchurl {
219 219 url = "https://files.pythonhosted.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
220 220 sha256 = "0ykqr7syxfa6h9adjfnsv1gdsca2xzm22vmic8859n0f0j09abj5";
221 221 };
222 222 meta = {
223 223 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
224 224 };
225 225 };
226 226 "channelstream" = super.buildPythonPackage {
227 227 name = "channelstream-0.5.2";
228 228 doCheck = false;
229 229 propagatedBuildInputs = [
230 230 self."gevent"
231 231 self."ws4py"
232 232 self."pyramid"
233 233 self."pyramid-jinja2"
234 234 self."itsdangerous"
235 235 self."requests"
236 236 self."six"
237 237 ];
238 238 src = fetchurl {
239 239 url = "https://files.pythonhosted.org/packages/2b/31/29a8e085cf5bf97fa88e7b947adabfc581a18a3463adf77fb6dada34a65f/channelstream-0.5.2.tar.gz";
240 240 sha256 = "1qbm4xdl5hfkja683x546bncg3rqq8qv79w1m1a1wd48cqqzb6rm";
241 241 };
242 242 meta = {
243 243 license = [ pkgs.lib.licenses.bsdOriginal ];
244 244 };
245 245 };
246 246 "click" = super.buildPythonPackage {
247 247 name = "click-7.0";
248 248 doCheck = false;
249 249 src = fetchurl {
250 250 url = "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz";
251 251 sha256 = "1mzjixd4vjbjvzb6vylki9w1556a9qmdh35kzmq6cign46av952v";
252 252 };
253 253 meta = {
254 254 license = [ pkgs.lib.licenses.bsdOriginal ];
255 255 };
256 256 };
257 257 "colander" = super.buildPythonPackage {
258 258 name = "colander-1.7.0";
259 259 doCheck = false;
260 260 propagatedBuildInputs = [
261 261 self."translationstring"
262 262 self."iso8601"
263 263 self."enum34"
264 264 ];
265 265 src = fetchurl {
266 266 url = "https://files.pythonhosted.org/packages/db/e4/74ab06f54211917b41865cafc987ce511e35503de48da9bfe9358a1bdc3e/colander-1.7.0.tar.gz";
267 267 sha256 = "1wl1bqab307lbbcjx81i28s3yl6dlm4rf15fxawkjb6j48x1cn6p";
268 268 };
269 269 meta = {
270 270 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
271 271 };
272 272 };
273 273 "configobj" = super.buildPythonPackage {
274 274 name = "configobj-5.0.6";
275 275 doCheck = false;
276 276 propagatedBuildInputs = [
277 277 self."six"
278 278 ];
279 279 src = fetchurl {
280 280 url = "https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c";
281 281 sha256 = "1hhcxirwvg58grlfr177b3awhbq8hlx1l3lh69ifl1ki7lfd1s1x";
282 282 };
283 283 meta = {
284 284 license = [ pkgs.lib.licenses.bsdOriginal ];
285 285 };
286 286 };
287 287 "configparser" = super.buildPythonPackage {
288 288 name = "configparser-3.7.4";
289 289 doCheck = false;
290 290 src = fetchurl {
291 291 url = "https://files.pythonhosted.org/packages/e2/1c/83fd53748d8245cb9a3399f705c251d3fc0ce7df04450aac1cfc49dd6a0f/configparser-3.7.4.tar.gz";
292 292 sha256 = "0xac32886ihs2xg7w1gppcq2sgin5qsm8lqwijs5xifq9w0x0q6s";
293 293 };
294 294 meta = {
295 295 license = [ pkgs.lib.licenses.mit ];
296 296 };
297 297 };
298 298 "cov-core" = super.buildPythonPackage {
299 299 name = "cov-core-1.15.0";
300 300 doCheck = false;
301 301 propagatedBuildInputs = [
302 302 self."coverage"
303 303 ];
304 304 src = fetchurl {
305 305 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
306 306 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
307 307 };
308 308 meta = {
309 309 license = [ pkgs.lib.licenses.mit ];
310 310 };
311 311 };
312 312 "coverage" = super.buildPythonPackage {
313 name = "coverage-4.5.1";
313 name = "coverage-4.5.3";
314 314 doCheck = false;
315 315 src = fetchurl {
316 url = "https://files.pythonhosted.org/packages/35/fe/e7df7289d717426093c68d156e0fd9117c8f4872b6588e8a8928a0f68424/coverage-4.5.1.tar.gz";
317 sha256 = "1wbrzpxka3xd4nmmkc6q0ir343d91kymwsm8pbmwa0d2a7q4ir2n";
316 url = "https://files.pythonhosted.org/packages/82/70/2280b5b29a0352519bb95ab0ef1ea942d40466ca71c53a2085bdeff7b0eb/coverage-4.5.3.tar.gz";
317 sha256 = "02f6m073qdispn96rc616hg0rnmw1pgqzw3bgxwiwza4zf9hirlx";
318 318 };
319 319 meta = {
320 320 license = [ pkgs.lib.licenses.asl20 ];
321 321 };
322 322 };
323 323 "cryptography" = super.buildPythonPackage {
324 324 name = "cryptography-2.6.1";
325 325 doCheck = false;
326 326 propagatedBuildInputs = [
327 327 self."asn1crypto"
328 328 self."six"
329 329 self."cffi"
330 330 self."enum34"
331 331 self."ipaddress"
332 332 ];
333 333 src = fetchurl {
334 334 url = "https://files.pythonhosted.org/packages/07/ca/bc827c5e55918ad223d59d299fff92f3563476c3b00d0a9157d9c0217449/cryptography-2.6.1.tar.gz";
335 335 sha256 = "19iwz5avym5zl6jrrrkym1rdaa9h61j20ph4cswsqgv8xg5j3j16";
336 336 };
337 337 meta = {
338 338 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
339 339 };
340 340 };
341 341 "cssselect" = super.buildPythonPackage {
342 342 name = "cssselect-1.0.3";
343 343 doCheck = false;
344 344 src = fetchurl {
345 345 url = "https://files.pythonhosted.org/packages/52/ea/f31e1d2e9eb130fda2a631e22eac369dc644e8807345fbed5113f2d6f92b/cssselect-1.0.3.tar.gz";
346 346 sha256 = "011jqa2jhmydhi0iz4v1w3cr540z5zas8g2bw8brdw4s4b2qnv86";
347 347 };
348 348 meta = {
349 349 license = [ pkgs.lib.licenses.bsdOriginal ];
350 350 };
351 351 };
352 352 "decorator" = super.buildPythonPackage {
353 353 name = "decorator-4.1.2";
354 354 doCheck = false;
355 355 src = fetchurl {
356 356 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
357 357 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
358 358 };
359 359 meta = {
360 360 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
361 361 };
362 362 };
363 363 "deform" = super.buildPythonPackage {
364 364 name = "deform-2.0.7";
365 365 doCheck = false;
366 366 propagatedBuildInputs = [
367 367 self."chameleon"
368 368 self."colander"
369 369 self."iso8601"
370 370 self."peppercorn"
371 371 self."translationstring"
372 372 self."zope.deprecation"
373 373 ];
374 374 src = fetchurl {
375 375 url = "https://files.pythonhosted.org/packages/cf/a1/bc234527b8f181de9acd80e796483c00007658d1e32b7de78f1c2e004d9a/deform-2.0.7.tar.gz";
376 376 sha256 = "0jnpi0zr2hjvbmiz6nm33yqv976dn9lf51vhlzqc0i75xcr9rwig";
377 377 };
378 378 meta = {
379 379 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
380 380 };
381 381 };
382 382 "defusedxml" = super.buildPythonPackage {
383 383 name = "defusedxml-0.5.0";
384 384 doCheck = false;
385 385 src = fetchurl {
386 386 url = "https://files.pythonhosted.org/packages/74/ba/4ba4e89e21b5a2e267d80736ea674609a0a33cc4435a6d748ef04f1f9374/defusedxml-0.5.0.tar.gz";
387 387 sha256 = "1x54n0h8hl92vvwyymx883fbqpqjwn2mc8fb383bcg3z9zwz5mr4";
388 388 };
389 389 meta = {
390 390 license = [ pkgs.lib.licenses.psfl ];
391 391 };
392 392 };
393 393 "dm.xmlsec.binding" = super.buildPythonPackage {
394 394 name = "dm.xmlsec.binding-1.3.7";
395 395 doCheck = false;
396 396 propagatedBuildInputs = [
397 397 self."setuptools"
398 398 self."lxml"
399 399 ];
400 400 src = fetchurl {
401 401 url = "https://files.pythonhosted.org/packages/2c/9e/7651982d50252692991acdae614af821fd6c79bc8dcd598ad71d55be8fc7/dm.xmlsec.binding-1.3.7.tar.gz";
402 402 sha256 = "03jjjscx1pz2nc0dwiw9nia02qbz1c6f0f9zkyr8fmvys2n5jkb3";
403 403 };
404 404 meta = {
405 405 license = [ pkgs.lib.licenses.bsdOriginal ];
406 406 };
407 407 };
408 408 "docutils" = super.buildPythonPackage {
409 409 name = "docutils-0.14";
410 410 doCheck = false;
411 411 src = fetchurl {
412 412 url = "https://files.pythonhosted.org/packages/84/f4/5771e41fdf52aabebbadecc9381d11dea0fa34e4759b4071244fa094804c/docutils-0.14.tar.gz";
413 413 sha256 = "0x22fs3pdmr42kvz6c654756wja305qv6cx1zbhwlagvxgr4xrji";
414 414 };
415 415 meta = {
416 416 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
417 417 };
418 418 };
419 419 "dogpile.cache" = super.buildPythonPackage {
420 420 name = "dogpile.cache-0.7.1";
421 421 doCheck = false;
422 422 propagatedBuildInputs = [
423 423 self."decorator"
424 424 ];
425 425 src = fetchurl {
426 426 url = "https://files.pythonhosted.org/packages/84/3e/dbf1cfc5228f1d3dca80ef714db2c5aaec5cd9efaf54d7e3daef6bc48b19/dogpile.cache-0.7.1.tar.gz";
427 427 sha256 = "0caazmrzhnfqb5yrp8myhw61ny637jj69wcngrpbvi31jlcpy6v9";
428 428 };
429 429 meta = {
430 430 license = [ pkgs.lib.licenses.bsdOriginal ];
431 431 };
432 432 };
433 433 "dogpile.core" = super.buildPythonPackage {
434 434 name = "dogpile.core-0.4.1";
435 435 doCheck = false;
436 436 src = fetchurl {
437 437 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
438 438 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
439 439 };
440 440 meta = {
441 441 license = [ pkgs.lib.licenses.bsdOriginal ];
442 442 };
443 443 };
444 444 "ecdsa" = super.buildPythonPackage {
445 445 name = "ecdsa-0.13";
446 446 doCheck = false;
447 447 src = fetchurl {
448 448 url = "https://files.pythonhosted.org/packages/f9/e5/99ebb176e47f150ac115ffeda5fedb6a3dbb3c00c74a59fd84ddf12f5857/ecdsa-0.13.tar.gz";
449 449 sha256 = "1yj31j0asmrx4an9xvsaj2icdmzy6pw0glfpqrrkrphwdpi1xkv4";
450 450 };
451 451 meta = {
452 452 license = [ pkgs.lib.licenses.mit ];
453 453 };
454 454 };
455 455 "elasticsearch" = super.buildPythonPackage {
456 456 name = "elasticsearch-6.3.1";
457 457 doCheck = false;
458 458 propagatedBuildInputs = [
459 459 self."urllib3"
460 460 ];
461 461 src = fetchurl {
462 462 url = "https://files.pythonhosted.org/packages/9d/ce/c4664e8380e379a9402ecfbaf158e56396da90d520daba21cfa840e0eb71/elasticsearch-6.3.1.tar.gz";
463 463 sha256 = "12y93v0yn7a4xmf969239g8gb3l4cdkclfpbk1qc8hx5qkymrnma";
464 464 };
465 465 meta = {
466 466 license = [ pkgs.lib.licenses.asl20 ];
467 467 };
468 468 };
469 469 "elasticsearch-dsl" = super.buildPythonPackage {
470 470 name = "elasticsearch-dsl-6.3.1";
471 471 doCheck = false;
472 472 propagatedBuildInputs = [
473 473 self."six"
474 474 self."python-dateutil"
475 475 self."elasticsearch"
476 476 self."ipaddress"
477 477 ];
478 478 src = fetchurl {
479 479 url = "https://files.pythonhosted.org/packages/4c/0d/1549f50c591db6bb4e66cbcc8d34a6e537c3d89aa426b167c244fd46420a/elasticsearch-dsl-6.3.1.tar.gz";
480 480 sha256 = "1gh8a0shqi105k325hgwb9avrpdjh0mc6mxwfg9ba7g6lssb702z";
481 481 };
482 482 meta = {
483 483 license = [ pkgs.lib.licenses.asl20 ];
484 484 };
485 485 };
486 486 "elasticsearch1" = super.buildPythonPackage {
487 487 name = "elasticsearch1-1.10.0";
488 488 doCheck = false;
489 489 propagatedBuildInputs = [
490 490 self."urllib3"
491 491 ];
492 492 src = fetchurl {
493 493 url = "https://files.pythonhosted.org/packages/a6/eb/73e75f9681fa71e3157b8ee878534235d57f24ee64f0e77f8d995fb57076/elasticsearch1-1.10.0.tar.gz";
494 494 sha256 = "0g89444kd5zwql4vbvyrmi2m6l6dcj6ga98j4hqxyyyz6z20aki2";
495 495 };
496 496 meta = {
497 497 license = [ pkgs.lib.licenses.asl20 ];
498 498 };
499 499 };
500 500 "elasticsearch1-dsl" = super.buildPythonPackage {
501 501 name = "elasticsearch1-dsl-0.0.12";
502 502 doCheck = false;
503 503 propagatedBuildInputs = [
504 504 self."six"
505 505 self."python-dateutil"
506 506 self."elasticsearch1"
507 507 ];
508 508 src = fetchurl {
509 509 url = "https://files.pythonhosted.org/packages/eb/9d/785342775cb10eddc9b8d7457d618a423b4f0b89d8b2b2d1bc27190d71db/elasticsearch1-dsl-0.0.12.tar.gz";
510 510 sha256 = "0ig1ly39v93hba0z975wnhbmzwj28w6w1sqlr2g7cn5spp732bhk";
511 511 };
512 512 meta = {
513 513 license = [ pkgs.lib.licenses.asl20 ];
514 514 };
515 515 };
516 516 "elasticsearch2" = super.buildPythonPackage {
517 517 name = "elasticsearch2-2.5.0";
518 518 doCheck = false;
519 519 propagatedBuildInputs = [
520 520 self."urllib3"
521 521 ];
522 522 src = fetchurl {
523 523 url = "https://files.pythonhosted.org/packages/84/77/63cf63d4ba11d913b5278406f2a37b0712bec6fc85edfb6151a33eaeba25/elasticsearch2-2.5.0.tar.gz";
524 524 sha256 = "0ky0q16lbvz022yv6q3pix7aamf026p1y994537ccjf0p0dxnbxr";
525 525 };
526 526 meta = {
527 527 license = [ pkgs.lib.licenses.asl20 ];
528 528 };
529 529 };
530 530 "entrypoints" = super.buildPythonPackage {
531 531 name = "entrypoints-0.2.2";
532 532 doCheck = false;
533 533 propagatedBuildInputs = [
534 534 self."configparser"
535 535 ];
536 536 src = fetchurl {
537 537 url = "https://code.rhodecode.com/upstream/entrypoints/archive/96e6d645684e1af3d7df5b5272f3fe85a546b233.tar.gz?md5=7db37771aea9ac9fefe093e5d6987313";
538 538 sha256 = "0bihrdp8ahsys437kxdhk52gz6kib8rxjv71i93wkw7594fcaxll";
539 539 };
540 540 meta = {
541 541 license = [ pkgs.lib.licenses.mit ];
542 542 };
543 543 };
544 544 "enum34" = super.buildPythonPackage {
545 545 name = "enum34-1.1.6";
546 546 doCheck = false;
547 547 src = fetchurl {
548 548 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
549 549 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
550 550 };
551 551 meta = {
552 552 license = [ pkgs.lib.licenses.bsdOriginal ];
553 553 };
554 554 };
555 555 "formencode" = super.buildPythonPackage {
556 556 name = "formencode-1.2.4";
557 557 doCheck = false;
558 558 src = fetchurl {
559 559 url = "https://files.pythonhosted.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
560 560 sha256 = "1fgy04sdy4yry5xcjls3x3xy30dqwj58ycnkndim819jx0788w42";
561 561 };
562 562 meta = {
563 563 license = [ pkgs.lib.licenses.psfl ];
564 564 };
565 565 };
566 566 "funcsigs" = super.buildPythonPackage {
567 567 name = "funcsigs-1.0.2";
568 568 doCheck = false;
569 569 src = fetchurl {
570 570 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
571 571 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
572 572 };
573 573 meta = {
574 574 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
575 575 };
576 576 };
577 577 "functools32" = super.buildPythonPackage {
578 578 name = "functools32-3.2.3.post2";
579 579 doCheck = false;
580 580 src = fetchurl {
581 581 url = "https://files.pythonhosted.org/packages/c5/60/6ac26ad05857c601308d8fb9e87fa36d0ebf889423f47c3502ef034365db/functools32-3.2.3-2.tar.gz";
582 582 sha256 = "0v8ya0b58x47wp216n1zamimv4iw57cxz3xxhzix52jkw3xks9gn";
583 583 };
584 584 meta = {
585 585 license = [ pkgs.lib.licenses.psfl ];
586 586 };
587 587 };
588 588 "future" = super.buildPythonPackage {
589 589 name = "future-0.14.3";
590 590 doCheck = false;
591 591 src = fetchurl {
592 592 url = "https://files.pythonhosted.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
593 593 sha256 = "1savk7jx7hal032f522c5ajhh8fra6gmnadrj9adv5qxi18pv1b2";
594 594 };
595 595 meta = {
596 596 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
597 597 };
598 598 };
599 599 "futures" = super.buildPythonPackage {
600 600 name = "futures-3.0.2";
601 601 doCheck = false;
602 602 src = fetchurl {
603 603 url = "https://files.pythonhosted.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
604 604 sha256 = "0mz2pbgxbc2nbib1szifi07whjbfs4r02pv2z390z7p410awjgyw";
605 605 };
606 606 meta = {
607 607 license = [ pkgs.lib.licenses.bsdOriginal ];
608 608 };
609 609 };
610 610 "gevent" = super.buildPythonPackage {
611 611 name = "gevent-1.4.0";
612 612 doCheck = false;
613 613 propagatedBuildInputs = [
614 614 self."greenlet"
615 615 ];
616 616 src = fetchurl {
617 617 url = "https://files.pythonhosted.org/packages/ed/27/6c49b70808f569b66ec7fac2e78f076e9b204db9cf5768740cff3d5a07ae/gevent-1.4.0.tar.gz";
618 618 sha256 = "1lchr4akw2jkm5v4kz7bdm4wv3knkfhbfn9vkkz4s5yrkcxzmdqy";
619 619 };
620 620 meta = {
621 621 license = [ pkgs.lib.licenses.mit ];
622 622 };
623 623 };
624 624 "gnureadline" = super.buildPythonPackage {
625 625 name = "gnureadline-6.3.8";
626 626 doCheck = false;
627 627 src = fetchurl {
628 628 url = "https://files.pythonhosted.org/packages/50/64/86085c823cd78f9df9d8e33dce0baa71618016f8860460b82cf6610e1eb3/gnureadline-6.3.8.tar.gz";
629 629 sha256 = "0ddhj98x2nv45iz4aadk4b9m0b1kpsn1xhcbypn5cd556knhiqjq";
630 630 };
631 631 meta = {
632 632 license = [ { fullName = "GNU General Public License v3 (GPLv3)"; } pkgs.lib.licenses.gpl1 ];
633 633 };
634 634 };
635 635 "gprof2dot" = super.buildPythonPackage {
636 636 name = "gprof2dot-2017.9.19";
637 637 doCheck = false;
638 638 src = fetchurl {
639 639 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
640 640 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
641 641 };
642 642 meta = {
643 643 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
644 644 };
645 645 };
646 646 "greenlet" = super.buildPythonPackage {
647 647 name = "greenlet-0.4.15";
648 648 doCheck = false;
649 649 src = fetchurl {
650 650 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
651 651 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
652 652 };
653 653 meta = {
654 654 license = [ pkgs.lib.licenses.mit ];
655 655 };
656 656 };
657 657 "gunicorn" = super.buildPythonPackage {
658 658 name = "gunicorn-19.9.0";
659 659 doCheck = false;
660 660 src = fetchurl {
661 661 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
662 662 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
663 663 };
664 664 meta = {
665 665 license = [ pkgs.lib.licenses.mit ];
666 666 };
667 667 };
668 668 "hupper" = super.buildPythonPackage {
669 669 name = "hupper-1.6.1";
670 670 doCheck = false;
671 671 src = fetchurl {
672 672 url = "https://files.pythonhosted.org/packages/85/d9/e005d357b11249c5d70ddf5b7adab2e4c0da4e8b0531ff146917a04fe6c0/hupper-1.6.1.tar.gz";
673 673 sha256 = "0d3cvkc8ssgwk54wvhbifj56ry97qi10pfzwfk8vwzzcikbfp3zy";
674 674 };
675 675 meta = {
676 676 license = [ pkgs.lib.licenses.mit ];
677 677 };
678 678 };
679 679 "infrae.cache" = super.buildPythonPackage {
680 680 name = "infrae.cache-1.0.1";
681 681 doCheck = false;
682 682 propagatedBuildInputs = [
683 683 self."beaker"
684 684 self."repoze.lru"
685 685 ];
686 686 src = fetchurl {
687 687 url = "https://files.pythonhosted.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
688 688 sha256 = "1dvqsjn8vw253wz9d1pz17j79mf4bs53dvp2qxck2qdp1am1njw4";
689 689 };
690 690 meta = {
691 691 license = [ pkgs.lib.licenses.zpl21 ];
692 692 };
693 693 };
694 694 "invoke" = super.buildPythonPackage {
695 695 name = "invoke-0.13.0";
696 696 doCheck = false;
697 697 src = fetchurl {
698 698 url = "https://files.pythonhosted.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
699 699 sha256 = "0794vhgxfmkh0vzkkg5cfv1w82g3jc3xr18wim29far9qpx9468s";
700 700 };
701 701 meta = {
702 702 license = [ pkgs.lib.licenses.bsdOriginal ];
703 703 };
704 704 };
705 705 "ipaddress" = super.buildPythonPackage {
706 706 name = "ipaddress-1.0.22";
707 707 doCheck = false;
708 708 src = fetchurl {
709 709 url = "https://files.pythonhosted.org/packages/97/8d/77b8cedcfbf93676148518036c6b1ce7f8e14bf07e95d7fd4ddcb8cc052f/ipaddress-1.0.22.tar.gz";
710 710 sha256 = "0b570bm6xqpjwqis15pvdy6lyvvzfndjvkynilcddjj5x98wfimi";
711 711 };
712 712 meta = {
713 713 license = [ pkgs.lib.licenses.psfl ];
714 714 };
715 715 };
716 716 "ipdb" = super.buildPythonPackage {
717 717 name = "ipdb-0.12";
718 718 doCheck = false;
719 719 propagatedBuildInputs = [
720 720 self."setuptools"
721 721 self."ipython"
722 722 ];
723 723 src = fetchurl {
724 724 url = "https://files.pythonhosted.org/packages/6d/43/c3c2e866a8803e196d6209595020a4a6db1a3c5d07c01455669497ae23d0/ipdb-0.12.tar.gz";
725 725 sha256 = "1khr2n7xfy8hg65kj1bsrjq9g7656pp0ybfa8abpbzpdawji3qnw";
726 726 };
727 727 meta = {
728 728 license = [ pkgs.lib.licenses.bsdOriginal ];
729 729 };
730 730 };
731 731 "ipython" = super.buildPythonPackage {
732 732 name = "ipython-5.1.0";
733 733 doCheck = false;
734 734 propagatedBuildInputs = [
735 735 self."setuptools"
736 736 self."decorator"
737 737 self."pickleshare"
738 738 self."simplegeneric"
739 739 self."traitlets"
740 740 self."prompt-toolkit"
741 741 self."pygments"
742 742 self."pexpect"
743 743 self."backports.shutil-get-terminal-size"
744 744 self."pathlib2"
745 745 self."pexpect"
746 746 ];
747 747 src = fetchurl {
748 748 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
749 749 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
750 750 };
751 751 meta = {
752 752 license = [ pkgs.lib.licenses.bsdOriginal ];
753 753 };
754 754 };
755 755 "ipython-genutils" = super.buildPythonPackage {
756 756 name = "ipython-genutils-0.2.0";
757 757 doCheck = false;
758 758 src = fetchurl {
759 759 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
760 760 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
761 761 };
762 762 meta = {
763 763 license = [ pkgs.lib.licenses.bsdOriginal ];
764 764 };
765 765 };
766 766 "iso8601" = super.buildPythonPackage {
767 767 name = "iso8601-0.1.12";
768 768 doCheck = false;
769 769 src = fetchurl {
770 770 url = "https://files.pythonhosted.org/packages/45/13/3db24895497345fb44c4248c08b16da34a9eb02643cea2754b21b5ed08b0/iso8601-0.1.12.tar.gz";
771 771 sha256 = "10nyvvnrhw2w3p09v1ica4lgj6f4g9j3kkfx17qmraiq3w7b5i29";
772 772 };
773 773 meta = {
774 774 license = [ pkgs.lib.licenses.mit ];
775 775 };
776 776 };
777 777 "isodate" = super.buildPythonPackage {
778 778 name = "isodate-0.6.0";
779 779 doCheck = false;
780 780 propagatedBuildInputs = [
781 781 self."six"
782 782 ];
783 783 src = fetchurl {
784 784 url = "https://files.pythonhosted.org/packages/b1/80/fb8c13a4cd38eb5021dc3741a9e588e4d1de88d895c1910c6fc8a08b7a70/isodate-0.6.0.tar.gz";
785 785 sha256 = "1n7jkz68kk5pwni540pr5zdh99bf6ywydk1p5pdrqisrawylldif";
786 786 };
787 787 meta = {
788 788 license = [ pkgs.lib.licenses.bsdOriginal ];
789 789 };
790 790 };
791 791 "itsdangerous" = super.buildPythonPackage {
792 792 name = "itsdangerous-0.24";
793 793 doCheck = false;
794 794 src = fetchurl {
795 795 url = "https://files.pythonhosted.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
796 796 sha256 = "06856q6x675ly542ig0plbqcyab6ksfzijlyf1hzhgg3sgwgrcyb";
797 797 };
798 798 meta = {
799 799 license = [ pkgs.lib.licenses.bsdOriginal ];
800 800 };
801 801 };
802 802 "jinja2" = super.buildPythonPackage {
803 803 name = "jinja2-2.9.6";
804 804 doCheck = false;
805 805 propagatedBuildInputs = [
806 806 self."markupsafe"
807 807 ];
808 808 src = fetchurl {
809 809 url = "https://files.pythonhosted.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
810 810 sha256 = "1zzrkywhziqffrzks14kzixz7nd4yh2vc0fb04a68vfd2ai03anx";
811 811 };
812 812 meta = {
813 813 license = [ pkgs.lib.licenses.bsdOriginal ];
814 814 };
815 815 };
816 816 "jsonschema" = super.buildPythonPackage {
817 817 name = "jsonschema-2.6.0";
818 818 doCheck = false;
819 819 propagatedBuildInputs = [
820 820 self."functools32"
821 821 ];
822 822 src = fetchurl {
823 823 url = "https://files.pythonhosted.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
824 824 sha256 = "00kf3zmpp9ya4sydffpifn0j0mzm342a2vzh82p6r0vh10cg7xbg";
825 825 };
826 826 meta = {
827 827 license = [ pkgs.lib.licenses.mit ];
828 828 };
829 829 };
830 830 "jupyter-client" = super.buildPythonPackage {
831 831 name = "jupyter-client-5.0.0";
832 832 doCheck = false;
833 833 propagatedBuildInputs = [
834 834 self."traitlets"
835 835 self."jupyter-core"
836 836 self."pyzmq"
837 837 self."python-dateutil"
838 838 ];
839 839 src = fetchurl {
840 840 url = "https://files.pythonhosted.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
841 841 sha256 = "0nxw4rqk4wsjhc87gjqd7pv89cb9dnimcfnmcmp85bmrvv1gjri7";
842 842 };
843 843 meta = {
844 844 license = [ pkgs.lib.licenses.bsdOriginal ];
845 845 };
846 846 };
847 847 "jupyter-core" = super.buildPythonPackage {
848 848 name = "jupyter-core-4.4.0";
849 849 doCheck = false;
850 850 propagatedBuildInputs = [
851 851 self."traitlets"
852 852 ];
853 853 src = fetchurl {
854 854 url = "https://files.pythonhosted.org/packages/b6/2d/2804f4de3a95583f65e5dcb4d7c8c7183124882323758996e867f47e72af/jupyter_core-4.4.0.tar.gz";
855 855 sha256 = "1dy083rarba8prn9f9srxq3c7n7vyql02ycrqq306c40lr57aw5s";
856 856 };
857 857 meta = {
858 858 license = [ pkgs.lib.licenses.bsdOriginal ];
859 859 };
860 860 };
861 861 "kombu" = super.buildPythonPackage {
862 862 name = "kombu-4.2.1";
863 863 doCheck = false;
864 864 propagatedBuildInputs = [
865 865 self."amqp"
866 866 ];
867 867 src = fetchurl {
868 868 url = "https://files.pythonhosted.org/packages/39/9f/556b988833abede4a80dbd18b2bdf4e8ff4486dd482ed45da961347e8ed2/kombu-4.2.1.tar.gz";
869 869 sha256 = "10lh3hncvw67fz0k5vgbx3yh9gjfpqdlia1f13i28cgnc1nfrbc6";
870 870 };
871 871 meta = {
872 872 license = [ pkgs.lib.licenses.bsdOriginal ];
873 873 };
874 874 };
875 875 "lxml" = super.buildPythonPackage {
876 876 name = "lxml-4.2.5";
877 877 doCheck = false;
878 878 src = fetchurl {
879 879 url = "https://files.pythonhosted.org/packages/4b/20/ddf5eb3bd5c57582d2b4652b4bbcf8da301bdfe5d805cb94e805f4d7464d/lxml-4.2.5.tar.gz";
880 880 sha256 = "0zw0y9hs0nflxhl9cs6ipwwh53szi3w2x06wl0k9cylyqac0cwin";
881 881 };
882 882 meta = {
883 883 license = [ pkgs.lib.licenses.bsdOriginal ];
884 884 };
885 885 };
886 886 "mako" = super.buildPythonPackage {
887 887 name = "mako-1.0.7";
888 888 doCheck = false;
889 889 propagatedBuildInputs = [
890 890 self."markupsafe"
891 891 ];
892 892 src = fetchurl {
893 893 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
894 894 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
895 895 };
896 896 meta = {
897 897 license = [ pkgs.lib.licenses.mit ];
898 898 };
899 899 };
900 900 "markdown" = super.buildPythonPackage {
901 901 name = "markdown-2.6.11";
902 902 doCheck = false;
903 903 src = fetchurl {
904 904 url = "https://files.pythonhosted.org/packages/b3/73/fc5c850f44af5889192dff783b7b0d8f3fe8d30b65c8e3f78f8f0265fecf/Markdown-2.6.11.tar.gz";
905 905 sha256 = "108g80ryzykh8bj0i7jfp71510wrcixdi771lf2asyghgyf8cmm8";
906 906 };
907 907 meta = {
908 908 license = [ pkgs.lib.licenses.bsdOriginal ];
909 909 };
910 910 };
911 911 "markupsafe" = super.buildPythonPackage {
912 912 name = "markupsafe-1.1.0";
913 913 doCheck = false;
914 914 src = fetchurl {
915 915 url = "https://files.pythonhosted.org/packages/ac/7e/1b4c2e05809a4414ebce0892fe1e32c14ace86ca7d50c70f00979ca9b3a3/MarkupSafe-1.1.0.tar.gz";
916 916 sha256 = "1lxirjypbdd3l9jl4vliilhfnhy7c7f2vlldqg1b0i74khn375sf";
917 917 };
918 918 meta = {
919 919 license = [ pkgs.lib.licenses.bsdOriginal ];
920 920 };
921 921 };
922 922 "meld3" = super.buildPythonPackage {
923 923 name = "meld3-1.0.2";
924 924 doCheck = false;
925 925 src = fetchurl {
926 926 url = "https://files.pythonhosted.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
927 927 sha256 = "0n4mkwlpsqnmn0dm0wm5hn9nkda0nafl0jdy5sdl5977znh59dzp";
928 928 };
929 929 meta = {
930 930 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
931 931 };
932 932 };
933 933 "mistune" = super.buildPythonPackage {
934 934 name = "mistune-0.8.4";
935 935 doCheck = false;
936 936 src = fetchurl {
937 937 url = "https://files.pythonhosted.org/packages/2d/a4/509f6e7783ddd35482feda27bc7f72e65b5e7dc910eca4ab2164daf9c577/mistune-0.8.4.tar.gz";
938 938 sha256 = "0vkmsh0x480rni51lhyvigfdf06b9247z868pk3bal1wnnfl58sr";
939 939 };
940 940 meta = {
941 941 license = [ pkgs.lib.licenses.bsdOriginal ];
942 942 };
943 943 };
944 944 "mock" = super.buildPythonPackage {
945 945 name = "mock-1.0.1";
946 946 doCheck = false;
947 947 src = fetchurl {
948 948 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
949 949 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
950 950 };
951 951 meta = {
952 952 license = [ pkgs.lib.licenses.bsdOriginal ];
953 953 };
954 954 };
955 955 "more-itertools" = super.buildPythonPackage {
956 956 name = "more-itertools-5.0.0";
957 957 doCheck = false;
958 958 propagatedBuildInputs = [
959 959 self."six"
960 960 ];
961 961 src = fetchurl {
962 962 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
963 963 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
964 964 };
965 965 meta = {
966 966 license = [ pkgs.lib.licenses.mit ];
967 967 };
968 968 };
969 969 "msgpack-python" = super.buildPythonPackage {
970 970 name = "msgpack-python-0.5.6";
971 971 doCheck = false;
972 972 src = fetchurl {
973 973 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
974 974 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
975 975 };
976 976 meta = {
977 977 license = [ pkgs.lib.licenses.asl20 ];
978 978 };
979 979 };
980 980 "mysql-python" = super.buildPythonPackage {
981 981 name = "mysql-python-1.2.5";
982 982 doCheck = false;
983 983 src = fetchurl {
984 984 url = "https://files.pythonhosted.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
985 985 sha256 = "0x0c2jg0bb3pp84njaqiic050qkyd7ymwhfvhipnimg58yv40441";
986 986 };
987 987 meta = {
988 988 license = [ pkgs.lib.licenses.gpl1 ];
989 989 };
990 990 };
991 991 "nbconvert" = super.buildPythonPackage {
992 992 name = "nbconvert-5.3.1";
993 993 doCheck = false;
994 994 propagatedBuildInputs = [
995 995 self."mistune"
996 996 self."jinja2"
997 997 self."pygments"
998 998 self."traitlets"
999 999 self."jupyter-core"
1000 1000 self."nbformat"
1001 1001 self."entrypoints"
1002 1002 self."bleach"
1003 1003 self."pandocfilters"
1004 1004 self."testpath"
1005 1005 ];
1006 1006 src = fetchurl {
1007 1007 url = "https://files.pythonhosted.org/packages/b9/a4/d0a0938ad6f5eeb4dea4e73d255c617ef94b0b2849d51194c9bbdb838412/nbconvert-5.3.1.tar.gz";
1008 1008 sha256 = "1f9dkvpx186xjm4xab0qbph588mncp4vqk3fmxrsnqs43mks9c8j";
1009 1009 };
1010 1010 meta = {
1011 1011 license = [ pkgs.lib.licenses.bsdOriginal ];
1012 1012 };
1013 1013 };
1014 1014 "nbformat" = super.buildPythonPackage {
1015 1015 name = "nbformat-4.4.0";
1016 1016 doCheck = false;
1017 1017 propagatedBuildInputs = [
1018 1018 self."ipython-genutils"
1019 1019 self."traitlets"
1020 1020 self."jsonschema"
1021 1021 self."jupyter-core"
1022 1022 ];
1023 1023 src = fetchurl {
1024 1024 url = "https://files.pythonhosted.org/packages/6e/0e/160754f7ae3e984863f585a3743b0ed1702043a81245907c8fae2d537155/nbformat-4.4.0.tar.gz";
1025 1025 sha256 = "00nlf08h8yc4q73nphfvfhxrcnilaqanb8z0mdy6nxk0vzq4wjgp";
1026 1026 };
1027 1027 meta = {
1028 1028 license = [ pkgs.lib.licenses.bsdOriginal ];
1029 1029 };
1030 1030 };
1031 1031 "packaging" = super.buildPythonPackage {
1032 1032 name = "packaging-15.2";
1033 1033 doCheck = false;
1034 1034 src = fetchurl {
1035 1035 url = "https://files.pythonhosted.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
1036 1036 sha256 = "1zn60w84bxvw6wypffka18ca66pa1k2cfrq3cq8fnsfja5m3k4ng";
1037 1037 };
1038 1038 meta = {
1039 1039 license = [ pkgs.lib.licenses.asl20 ];
1040 1040 };
1041 1041 };
1042 1042 "pandocfilters" = super.buildPythonPackage {
1043 1043 name = "pandocfilters-1.4.2";
1044 1044 doCheck = false;
1045 1045 src = fetchurl {
1046 1046 url = "https://files.pythonhosted.org/packages/4c/ea/236e2584af67bb6df960832731a6e5325fd4441de001767da328c33368ce/pandocfilters-1.4.2.tar.gz";
1047 1047 sha256 = "1a8d9b7s48gmq9zj0pmbyv2sivn5i7m6mybgpkk4jm5vd7hp1pdk";
1048 1048 };
1049 1049 meta = {
1050 1050 license = [ pkgs.lib.licenses.bsdOriginal ];
1051 1051 };
1052 1052 };
1053 1053 "paste" = super.buildPythonPackage {
1054 1054 name = "paste-3.0.8";
1055 1055 doCheck = false;
1056 1056 propagatedBuildInputs = [
1057 1057 self."six"
1058 1058 ];
1059 1059 src = fetchurl {
1060 1060 url = "https://files.pythonhosted.org/packages/66/65/e3acf1663438483c1f6ced0b6c6f3b90da9f0faacb0a6e2aa0f3f9f4b235/Paste-3.0.8.tar.gz";
1061 1061 sha256 = "05w1sh6ky4d7pmdb8nv82n13w22jcn3qsagg5ih3hjmbws9kkwf4";
1062 1062 };
1063 1063 meta = {
1064 1064 license = [ pkgs.lib.licenses.mit ];
1065 1065 };
1066 1066 };
1067 1067 "pastedeploy" = super.buildPythonPackage {
1068 1068 name = "pastedeploy-2.0.1";
1069 1069 doCheck = false;
1070 1070 src = fetchurl {
1071 1071 url = "https://files.pythonhosted.org/packages/19/a0/5623701df7e2478a68a1b685d1a84518024eef994cde7e4da8449a31616f/PasteDeploy-2.0.1.tar.gz";
1072 1072 sha256 = "02imfbbx1mi2h546f3sr37m47dk9qizaqhzzlhx8bkzxa6fzn8yl";
1073 1073 };
1074 1074 meta = {
1075 1075 license = [ pkgs.lib.licenses.mit ];
1076 1076 };
1077 1077 };
1078 1078 "pastescript" = super.buildPythonPackage {
1079 1079 name = "pastescript-3.1.0";
1080 1080 doCheck = false;
1081 1081 propagatedBuildInputs = [
1082 1082 self."paste"
1083 1083 self."pastedeploy"
1084 1084 self."six"
1085 1085 ];
1086 1086 src = fetchurl {
1087 1087 url = "https://files.pythonhosted.org/packages/9e/1d/14db1c283eb21a5d36b6ba1114c13b709629711e64acab653d9994fe346f/PasteScript-3.1.0.tar.gz";
1088 1088 sha256 = "02qcxjjr32ks7a6d4f533wl34ysc7yhwlrfcyqwqbzr52250v4fs";
1089 1089 };
1090 1090 meta = {
1091 1091 license = [ pkgs.lib.licenses.mit ];
1092 1092 };
1093 1093 };
1094 1094 "pathlib2" = super.buildPythonPackage {
1095 1095 name = "pathlib2-2.3.3";
1096 1096 doCheck = false;
1097 1097 propagatedBuildInputs = [
1098 1098 self."six"
1099 1099 self."scandir"
1100 1100 ];
1101 1101 src = fetchurl {
1102 1102 url = "https://files.pythonhosted.org/packages/bf/d7/a2568f4596b75d2c6e2b4094a7e64f620decc7887f69a1f2811931ea15b9/pathlib2-2.3.3.tar.gz";
1103 1103 sha256 = "0hpp92vqqgcd8h92msm9slv161b1q160igjwnkf2ag6cx0c96695";
1104 1104 };
1105 1105 meta = {
1106 1106 license = [ pkgs.lib.licenses.mit ];
1107 1107 };
1108 1108 };
1109 1109 "peppercorn" = super.buildPythonPackage {
1110 1110 name = "peppercorn-0.6";
1111 1111 doCheck = false;
1112 1112 src = fetchurl {
1113 1113 url = "https://files.pythonhosted.org/packages/e4/77/93085de7108cdf1a0b092ff443872a8f9442c736d7ddebdf2f27627935f4/peppercorn-0.6.tar.gz";
1114 1114 sha256 = "1ip4bfwcpwkq9hz2dai14k2cyabvwrnvcvrcmzxmqm04g8fnimwn";
1115 1115 };
1116 1116 meta = {
1117 1117 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1118 1118 };
1119 1119 };
1120 1120 "pexpect" = super.buildPythonPackage {
1121 name = "pexpect-4.6.0";
1121 name = "pexpect-4.7.0";
1122 1122 doCheck = false;
1123 1123 propagatedBuildInputs = [
1124 1124 self."ptyprocess"
1125 1125 ];
1126 1126 src = fetchurl {
1127 url = "https://files.pythonhosted.org/packages/89/43/07d07654ee3e25235d8cea4164cdee0ec39d1fda8e9203156ebe403ffda4/pexpect-4.6.0.tar.gz";
1128 sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
1127 url = "https://files.pythonhosted.org/packages/1c/b1/362a0d4235496cb42c33d1d8732b5e2c607b0129ad5fdd76f5a583b9fcb3/pexpect-4.7.0.tar.gz";
1128 sha256 = "1sv2rri15zwhds85a4kamwh9pj49qcxv7m4miyr4jfpfwv81yb4y";
1129 1129 };
1130 1130 meta = {
1131 1131 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1132 1132 };
1133 1133 };
1134 1134 "pickleshare" = super.buildPythonPackage {
1135 1135 name = "pickleshare-0.7.5";
1136 1136 doCheck = false;
1137 1137 propagatedBuildInputs = [
1138 1138 self."pathlib2"
1139 1139 ];
1140 1140 src = fetchurl {
1141 1141 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
1142 1142 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
1143 1143 };
1144 1144 meta = {
1145 1145 license = [ pkgs.lib.licenses.mit ];
1146 1146 };
1147 1147 };
1148 1148 "plaster" = super.buildPythonPackage {
1149 1149 name = "plaster-1.0";
1150 1150 doCheck = false;
1151 1151 propagatedBuildInputs = [
1152 1152 self."setuptools"
1153 1153 ];
1154 1154 src = fetchurl {
1155 1155 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
1156 1156 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
1157 1157 };
1158 1158 meta = {
1159 1159 license = [ pkgs.lib.licenses.mit ];
1160 1160 };
1161 1161 };
1162 1162 "plaster-pastedeploy" = super.buildPythonPackage {
1163 name = "plaster-pastedeploy-0.6";
1163 name = "plaster-pastedeploy-0.7";
1164 1164 doCheck = false;
1165 1165 propagatedBuildInputs = [
1166 1166 self."pastedeploy"
1167 1167 self."plaster"
1168 1168 ];
1169 1169 src = fetchurl {
1170 url = "https://files.pythonhosted.org/packages/3f/e7/6a6833158d2038ec40085433308a1e164fd1dac595513f6dd556d5669bb8/plaster_pastedeploy-0.6.tar.gz";
1171 sha256 = "1bkggk18f4z2bmsmxyxabvf62znvjwbivzh880419r3ap0616cf2";
1170 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
1171 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
1172 1172 };
1173 1173 meta = {
1174 1174 license = [ pkgs.lib.licenses.mit ];
1175 1175 };
1176 1176 };
1177 1177 "pluggy" = super.buildPythonPackage {
1178 1178 name = "pluggy-0.9.0";
1179 1179 doCheck = false;
1180 1180 src = fetchurl {
1181 1181 url = "https://files.pythonhosted.org/packages/a7/8c/55c629849c64e665258d8976322dfdad171fa2f57117590662d8a67618a4/pluggy-0.9.0.tar.gz";
1182 1182 sha256 = "13yg2q0wgcb4l8lgdvcnzqa8db5lrw3nwn50lxjy1z5jkp7gkv0r";
1183 1183 };
1184 1184 meta = {
1185 1185 license = [ pkgs.lib.licenses.mit ];
1186 1186 };
1187 1187 };
1188 1188 "prompt-toolkit" = super.buildPythonPackage {
1189 name = "prompt-toolkit-1.0.15";
1189 name = "prompt-toolkit-1.0.16";
1190 1190 doCheck = false;
1191 1191 propagatedBuildInputs = [
1192 1192 self."six"
1193 1193 self."wcwidth"
1194 1194 ];
1195 1195 src = fetchurl {
1196 url = "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
1197 sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
1196 url = "https://files.pythonhosted.org/packages/f1/03/bb36771dc9fa7553ac4bdc639a9ecdf6fda0ff4176faf940d97e3c16e41d/prompt_toolkit-1.0.16.tar.gz";
1197 sha256 = "1d65hm6nf0cbq0q0121m60zzy4s1fpg9fn761s1yxf08dridvkn1";
1198 1198 };
1199 1199 meta = {
1200 1200 license = [ pkgs.lib.licenses.bsdOriginal ];
1201 1201 };
1202 1202 };
1203 1203 "psutil" = super.buildPythonPackage {
1204 1204 name = "psutil-5.5.1";
1205 1205 doCheck = false;
1206 1206 src = fetchurl {
1207 1207 url = "https://files.pythonhosted.org/packages/c7/01/7c30b247cdc5ba29623faa5c8cf1f1bbf7e041783c340414b0ed7e067c64/psutil-5.5.1.tar.gz";
1208 1208 sha256 = "045qaqvn6k90bj5bcy259yrwcd2afgznaav3sfhphy9b8ambzkkj";
1209 1209 };
1210 1210 meta = {
1211 1211 license = [ pkgs.lib.licenses.bsdOriginal ];
1212 1212 };
1213 1213 };
1214 1214 "psycopg2" = super.buildPythonPackage {
1215 name = "psycopg2-2.7.7";
1215 name = "psycopg2-2.8.2";
1216 1216 doCheck = false;
1217 1217 src = fetchurl {
1218 url = "https://files.pythonhosted.org/packages/63/54/c039eb0f46f9a9406b59a638415c2012ad7be9b4b97bfddb1f48c280df3a/psycopg2-2.7.7.tar.gz";
1219 sha256 = "0zjbabb4qjx9dm07imhf8y5a9rpa06d5zah80myiimgdi83nslpl";
1218 url = "https://files.pythonhosted.org/packages/23/7e/93c325482c328619870b6cd09370f6dbe1148283daca65115cd63642e60f/psycopg2-2.8.2.tar.gz";
1219 sha256 = "122mn2z3r0zgs8jyq682jjjr6vq5690qmxqf22gj6g41dwdz5b2w";
1220 1220 };
1221 1221 meta = {
1222 1222 license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1223 1223 };
1224 1224 };
1225 1225 "ptyprocess" = super.buildPythonPackage {
1226 1226 name = "ptyprocess-0.6.0";
1227 1227 doCheck = false;
1228 1228 src = fetchurl {
1229 1229 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
1230 1230 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
1231 1231 };
1232 1232 meta = {
1233 1233 license = [ ];
1234 1234 };
1235 1235 };
1236 1236 "py" = super.buildPythonPackage {
1237 1237 name = "py-1.6.0";
1238 1238 doCheck = false;
1239 1239 src = fetchurl {
1240 1240 url = "https://files.pythonhosted.org/packages/4f/38/5f427d1eedae73063ce4da680d2bae72014995f9fdeaa57809df61c968cd/py-1.6.0.tar.gz";
1241 1241 sha256 = "1wcs3zv9wl5m5x7p16avqj2gsrviyb23yvc3pr330isqs0sh98q6";
1242 1242 };
1243 1243 meta = {
1244 1244 license = [ pkgs.lib.licenses.mit ];
1245 1245 };
1246 1246 };
1247 1247 "py-bcrypt" = super.buildPythonPackage {
1248 1248 name = "py-bcrypt-0.4";
1249 1249 doCheck = false;
1250 1250 src = fetchurl {
1251 1251 url = "https://files.pythonhosted.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1252 1252 sha256 = "0y6smdggwi5s72v6p1nn53dg6w05hna3d264cq6kas0lap73p8az";
1253 1253 };
1254 1254 meta = {
1255 1255 license = [ pkgs.lib.licenses.bsdOriginal ];
1256 1256 };
1257 1257 };
1258 1258 "py-gfm" = super.buildPythonPackage {
1259 1259 name = "py-gfm-0.1.4";
1260 1260 doCheck = false;
1261 1261 propagatedBuildInputs = [
1262 1262 self."setuptools"
1263 1263 self."markdown"
1264 1264 ];
1265 1265 src = fetchurl {
1266 1266 url = "https://files.pythonhosted.org/packages/06/ee/004a03a1d92bb386dae44f6dd087db541bc5093374f1637d4d4ae5596cc2/py-gfm-0.1.4.tar.gz";
1267 1267 sha256 = "0zip06g2isivx8fzgqd4n9qzsa22c25jas1rsb7m2rnjg72m0rzg";
1268 1268 };
1269 1269 meta = {
1270 1270 license = [ pkgs.lib.licenses.bsdOriginal ];
1271 1271 };
1272 1272 };
1273 1273 "pyasn1" = super.buildPythonPackage {
1274 1274 name = "pyasn1-0.4.5";
1275 1275 doCheck = false;
1276 1276 src = fetchurl {
1277 1277 url = "https://files.pythonhosted.org/packages/46/60/b7e32f6ff481b8a1f6c8f02b0fd9b693d1c92ddd2efb038ec050d99a7245/pyasn1-0.4.5.tar.gz";
1278 1278 sha256 = "1xqh3jh2nfi2bflk5a0vn59y3pp1vn54f3ksx652sid92gz2096s";
1279 1279 };
1280 1280 meta = {
1281 1281 license = [ pkgs.lib.licenses.bsdOriginal ];
1282 1282 };
1283 1283 };
1284 1284 "pyasn1-modules" = super.buildPythonPackage {
1285 1285 name = "pyasn1-modules-0.2.4";
1286 1286 doCheck = false;
1287 1287 propagatedBuildInputs = [
1288 1288 self."pyasn1"
1289 1289 ];
1290 1290 src = fetchurl {
1291 1291 url = "https://files.pythonhosted.org/packages/bd/a5/ef7bf693e8a8f015386c9167483199f54f8a8ec01d1c737e05524f16e792/pyasn1-modules-0.2.4.tar.gz";
1292 1292 sha256 = "0z3w5dqrrvdplg9ma45j8n23xvyrj9ki8mg4ibqbn7l4qpl90855";
1293 1293 };
1294 1294 meta = {
1295 1295 license = [ pkgs.lib.licenses.bsdOriginal ];
1296 1296 };
1297 1297 };
1298 1298 "pycparser" = super.buildPythonPackage {
1299 1299 name = "pycparser-2.19";
1300 1300 doCheck = false;
1301 1301 src = fetchurl {
1302 1302 url = "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz";
1303 1303 sha256 = "1cr5dcj9628lkz1qlwq3fv97c25363qppkmcayqvd05dpy573259";
1304 1304 };
1305 1305 meta = {
1306 1306 license = [ pkgs.lib.licenses.bsdOriginal ];
1307 1307 };
1308 1308 };
1309 1309 "pycrypto" = super.buildPythonPackage {
1310 1310 name = "pycrypto-2.6.1";
1311 1311 doCheck = false;
1312 1312 src = fetchurl {
1313 1313 url = "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1314 1314 sha256 = "0g0ayql5b9mkjam8hym6zyg6bv77lbh66rv1fyvgqb17kfc1xkpj";
1315 1315 };
1316 1316 meta = {
1317 1317 license = [ pkgs.lib.licenses.publicDomain ];
1318 1318 };
1319 1319 };
1320 1320 "pycurl" = super.buildPythonPackage {
1321 1321 name = "pycurl-7.43.0.2";
1322 1322 doCheck = false;
1323 1323 src = fetchurl {
1324 1324 url = "https://files.pythonhosted.org/packages/e8/e4/0dbb8735407189f00b33d84122b9be52c790c7c3b25286826f4e1bdb7bde/pycurl-7.43.0.2.tar.gz";
1325 1325 sha256 = "1915kb04k1j4y6k1dx1sgnbddxrl9r1n4q928if2lkrdm73xy30g";
1326 1326 };
1327 1327 meta = {
1328 1328 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1329 1329 };
1330 1330 };
1331 1331 "pygments" = super.buildPythonPackage {
1332 1332 name = "pygments-2.3.1";
1333 1333 doCheck = false;
1334 1334 src = fetchurl {
1335 1335 url = "https://files.pythonhosted.org/packages/64/69/413708eaf3a64a6abb8972644e0f20891a55e621c6759e2c3f3891e05d63/Pygments-2.3.1.tar.gz";
1336 1336 sha256 = "0ji87g09jph8jqcvclgb02qvxasdnr9pzvk90rl66d90yqcxmyjz";
1337 1337 };
1338 1338 meta = {
1339 1339 license = [ pkgs.lib.licenses.bsdOriginal ];
1340 1340 };
1341 1341 };
1342 1342 "pymysql" = super.buildPythonPackage {
1343 1343 name = "pymysql-0.8.1";
1344 1344 doCheck = false;
1345 1345 src = fetchurl {
1346 1346 url = "https://files.pythonhosted.org/packages/44/39/6bcb83cae0095a31b6be4511707fdf2009d3e29903a55a0494d3a9a2fac0/PyMySQL-0.8.1.tar.gz";
1347 1347 sha256 = "0a96crz55bw4h6myh833skrli7b0ck89m3x673y2z2ryy7zrpq9l";
1348 1348 };
1349 1349 meta = {
1350 1350 license = [ pkgs.lib.licenses.mit ];
1351 1351 };
1352 1352 };
1353 1353 "pyotp" = super.buildPythonPackage {
1354 1354 name = "pyotp-2.2.7";
1355 1355 doCheck = false;
1356 1356 src = fetchurl {
1357 1357 url = "https://files.pythonhosted.org/packages/b1/ab/477cda97b6ca7baced5106471cb1ac1fe698d1b035983b9f8ee3422989eb/pyotp-2.2.7.tar.gz";
1358 1358 sha256 = "00p69nw431f0s2ilg0hnd77p1l22m06p9rq4f8zfapmavnmzw3xy";
1359 1359 };
1360 1360 meta = {
1361 1361 license = [ pkgs.lib.licenses.mit ];
1362 1362 };
1363 1363 };
1364 1364 "pyparsing" = super.buildPythonPackage {
1365 1365 name = "pyparsing-2.3.0";
1366 1366 doCheck = false;
1367 1367 src = fetchurl {
1368 1368 url = "https://files.pythonhosted.org/packages/d0/09/3e6a5eeb6e04467b737d55f8bba15247ac0876f98fae659e58cd744430c6/pyparsing-2.3.0.tar.gz";
1369 1369 sha256 = "14k5v7n3xqw8kzf42x06bzp184spnlkya2dpjyflax6l3yrallzk";
1370 1370 };
1371 1371 meta = {
1372 1372 license = [ pkgs.lib.licenses.mit ];
1373 1373 };
1374 1374 };
1375 1375 "pyramid" = super.buildPythonPackage {
1376 name = "pyramid-1.10.2";
1376 name = "pyramid-1.10.4";
1377 1377 doCheck = false;
1378 1378 propagatedBuildInputs = [
1379 1379 self."hupper"
1380 1380 self."plaster"
1381 1381 self."plaster-pastedeploy"
1382 1382 self."setuptools"
1383 1383 self."translationstring"
1384 1384 self."venusian"
1385 1385 self."webob"
1386 1386 self."zope.deprecation"
1387 1387 self."zope.interface"
1388 1388 self."repoze.lru"
1389 1389 ];
1390 1390 src = fetchurl {
1391 url = "https://files.pythonhosted.org/packages/bc/0e/73de9b189ff00a963beeedaff90e27b134eedf2806279a1a3fe122fd65b6/pyramid-1.10.2.tar.gz";
1392 sha256 = "0gn6sw6ml67ir150ffivc0ad5hd448p43p9z2bkyp12jh2n9n2p7";
1391 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
1392 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
1393 1393 };
1394 1394 meta = {
1395 1395 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1396 1396 };
1397 1397 };
1398 1398 "pyramid-beaker" = super.buildPythonPackage {
1399 1399 name = "pyramid-beaker-0.8";
1400 1400 doCheck = false;
1401 1401 propagatedBuildInputs = [
1402 1402 self."pyramid"
1403 1403 self."beaker"
1404 1404 ];
1405 1405 src = fetchurl {
1406 1406 url = "https://files.pythonhosted.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1407 1407 sha256 = "0hflx3qkcdml1mwpq53sz46s7jickpfn0zy0ns2c7j445j66bp3p";
1408 1408 };
1409 1409 meta = {
1410 1410 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1411 1411 };
1412 1412 };
1413 1413 "pyramid-debugtoolbar" = super.buildPythonPackage {
1414 1414 name = "pyramid-debugtoolbar-4.5";
1415 1415 doCheck = false;
1416 1416 propagatedBuildInputs = [
1417 1417 self."pyramid"
1418 1418 self."pyramid-mako"
1419 1419 self."repoze.lru"
1420 1420 self."pygments"
1421 1421 self."ipaddress"
1422 1422 ];
1423 1423 src = fetchurl {
1424 1424 url = "https://files.pythonhosted.org/packages/14/28/1f240239af340d19ee271ac62958158c79edb01a44ad8c9885508dd003d2/pyramid_debugtoolbar-4.5.tar.gz";
1425 1425 sha256 = "0x2p3409pnx66n6dx5vc0mk2r1cp1ydr8mp120w44r9pwcngbibl";
1426 1426 };
1427 1427 meta = {
1428 1428 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1429 1429 };
1430 1430 };
1431 1431 "pyramid-jinja2" = super.buildPythonPackage {
1432 1432 name = "pyramid-jinja2-2.7";
1433 1433 doCheck = false;
1434 1434 propagatedBuildInputs = [
1435 1435 self."pyramid"
1436 1436 self."zope.deprecation"
1437 1437 self."jinja2"
1438 1438 self."markupsafe"
1439 1439 ];
1440 1440 src = fetchurl {
1441 1441 url = "https://files.pythonhosted.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
1442 1442 sha256 = "1sz5s0pp5jqhf4w22w9527yz8hgdi4mhr6apd6vw1gm5clghh8aw";
1443 1443 };
1444 1444 meta = {
1445 1445 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1446 1446 };
1447 1447 };
1448 1448 "pyramid-mailer" = super.buildPythonPackage {
1449 1449 name = "pyramid-mailer-0.15.1";
1450 1450 doCheck = false;
1451 1451 propagatedBuildInputs = [
1452 1452 self."pyramid"
1453 1453 self."repoze.sendmail"
1454 1454 self."transaction"
1455 1455 ];
1456 1456 src = fetchurl {
1457 1457 url = "https://files.pythonhosted.org/packages/a0/f2/6febf5459dff4d7e653314d575469ad2e11b9d2af2c3606360e1c67202f2/pyramid_mailer-0.15.1.tar.gz";
1458 1458 sha256 = "16vg8jb203jgb7b0hd6wllfqvp542qh2ry1gjai2m6qpv5agy2pc";
1459 1459 };
1460 1460 meta = {
1461 1461 license = [ pkgs.lib.licenses.bsdOriginal ];
1462 1462 };
1463 1463 };
1464 1464 "pyramid-mako" = super.buildPythonPackage {
1465 1465 name = "pyramid-mako-1.0.2";
1466 1466 doCheck = false;
1467 1467 propagatedBuildInputs = [
1468 1468 self."pyramid"
1469 1469 self."mako"
1470 1470 ];
1471 1471 src = fetchurl {
1472 1472 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1473 1473 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
1474 1474 };
1475 1475 meta = {
1476 1476 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1477 1477 };
1478 1478 };
1479 1479 "pysqlite" = super.buildPythonPackage {
1480 1480 name = "pysqlite-2.8.3";
1481 1481 doCheck = false;
1482 1482 src = fetchurl {
1483 1483 url = "https://files.pythonhosted.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1484 1484 sha256 = "1424gwq9sil2ffmnizk60q36vydkv8rxs6m7xs987kz8cdc37lqp";
1485 1485 };
1486 1486 meta = {
1487 1487 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1488 1488 };
1489 1489 };
1490 1490 "pytest" = super.buildPythonPackage {
1491 1491 name = "pytest-3.8.2";
1492 1492 doCheck = false;
1493 1493 propagatedBuildInputs = [
1494 1494 self."py"
1495 1495 self."six"
1496 1496 self."setuptools"
1497 1497 self."attrs"
1498 1498 self."more-itertools"
1499 1499 self."atomicwrites"
1500 1500 self."pluggy"
1501 1501 self."funcsigs"
1502 1502 self."pathlib2"
1503 1503 ];
1504 1504 src = fetchurl {
1505 1505 url = "https://files.pythonhosted.org/packages/5f/d2/7f77f406ac505abda02ab4afb50d06ebf304f6ea42fca34f8f37529106b2/pytest-3.8.2.tar.gz";
1506 1506 sha256 = "18nrwzn61kph2y6gxwfz9ms68rfvr9d4vcffsxng9p7jk9z18clk";
1507 1507 };
1508 1508 meta = {
1509 1509 license = [ pkgs.lib.licenses.mit ];
1510 1510 };
1511 1511 };
1512 1512 "pytest-cov" = super.buildPythonPackage {
1513 1513 name = "pytest-cov-2.6.0";
1514 1514 doCheck = false;
1515 1515 propagatedBuildInputs = [
1516 1516 self."pytest"
1517 1517 self."coverage"
1518 1518 ];
1519 1519 src = fetchurl {
1520 1520 url = "https://files.pythonhosted.org/packages/d9/e2/58f90a316fbd94dd50bf5c826a23f3f5d079fb3cc448c1e9f0e3c33a3d2a/pytest-cov-2.6.0.tar.gz";
1521 1521 sha256 = "0qnpp9y3ygx4jk4pf5ad71fh2skbvnr6gl54m7rg5qysnx4g0q73";
1522 1522 };
1523 1523 meta = {
1524 1524 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1525 1525 };
1526 1526 };
1527 1527 "pytest-profiling" = super.buildPythonPackage {
1528 1528 name = "pytest-profiling-1.3.0";
1529 1529 doCheck = false;
1530 1530 propagatedBuildInputs = [
1531 1531 self."six"
1532 1532 self."pytest"
1533 1533 self."gprof2dot"
1534 1534 ];
1535 1535 src = fetchurl {
1536 1536 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
1537 1537 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
1538 1538 };
1539 1539 meta = {
1540 1540 license = [ pkgs.lib.licenses.mit ];
1541 1541 };
1542 1542 };
1543 1543 "pytest-runner" = super.buildPythonPackage {
1544 1544 name = "pytest-runner-4.2";
1545 1545 doCheck = false;
1546 1546 src = fetchurl {
1547 1547 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
1548 1548 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
1549 1549 };
1550 1550 meta = {
1551 1551 license = [ pkgs.lib.licenses.mit ];
1552 1552 };
1553 1553 };
1554 1554 "pytest-sugar" = super.buildPythonPackage {
1555 1555 name = "pytest-sugar-0.9.1";
1556 1556 doCheck = false;
1557 1557 propagatedBuildInputs = [
1558 1558 self."pytest"
1559 1559 self."termcolor"
1560 1560 ];
1561 1561 src = fetchurl {
1562 1562 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
1563 1563 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
1564 1564 };
1565 1565 meta = {
1566 1566 license = [ pkgs.lib.licenses.bsdOriginal ];
1567 1567 };
1568 1568 };
1569 1569 "pytest-timeout" = super.buildPythonPackage {
1570 1570 name = "pytest-timeout-1.3.2";
1571 1571 doCheck = false;
1572 1572 propagatedBuildInputs = [
1573 1573 self."pytest"
1574 1574 ];
1575 1575 src = fetchurl {
1576 1576 url = "https://files.pythonhosted.org/packages/8c/3e/1b6a319d12ae7baa3acb7c18ff2c8630a09471a0319d43535c683b4d03eb/pytest-timeout-1.3.2.tar.gz";
1577 1577 sha256 = "09wnmzvnls2mnsdz7x3c3sk2zdp6jl4dryvyj5i8hqz16q2zq5qi";
1578 1578 };
1579 1579 meta = {
1580 1580 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1581 1581 };
1582 1582 };
1583 1583 "python-dateutil" = super.buildPythonPackage {
1584 1584 name = "python-dateutil-2.8.0";
1585 1585 doCheck = false;
1586 1586 propagatedBuildInputs = [
1587 1587 self."six"
1588 1588 ];
1589 1589 src = fetchurl {
1590 1590 url = "https://files.pythonhosted.org/packages/ad/99/5b2e99737edeb28c71bcbec5b5dda19d0d9ef3ca3e92e3e925e7c0bb364c/python-dateutil-2.8.0.tar.gz";
1591 1591 sha256 = "17nsfhy4xdz1khrfxa61vd7pmvd5z0wa3zb6v4gb4kfnykv0b668";
1592 1592 };
1593 1593 meta = {
1594 1594 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.asl20 { fullName = "Dual License"; } ];
1595 1595 };
1596 1596 };
1597 1597 "python-editor" = super.buildPythonPackage {
1598 1598 name = "python-editor-1.0.4";
1599 1599 doCheck = false;
1600 1600 src = fetchurl {
1601 1601 url = "https://files.pythonhosted.org/packages/0a/85/78f4a216d28343a67b7397c99825cff336330893f00601443f7c7b2f2234/python-editor-1.0.4.tar.gz";
1602 1602 sha256 = "0yrjh8w72ivqxi4i7xsg5b1vz15x8fg51xra7c3bgfyxqnyadzai";
1603 1603 };
1604 1604 meta = {
1605 1605 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1606 1606 };
1607 1607 };
1608 1608 "python-ldap" = super.buildPythonPackage {
1609 1609 name = "python-ldap-3.1.0";
1610 1610 doCheck = false;
1611 1611 propagatedBuildInputs = [
1612 1612 self."pyasn1"
1613 1613 self."pyasn1-modules"
1614 1614 ];
1615 1615 src = fetchurl {
1616 1616 url = "https://files.pythonhosted.org/packages/7f/1c/28d721dff2fcd2fef9d55b40df63a00be26ec8a11e8c6fc612ae642f9cfd/python-ldap-3.1.0.tar.gz";
1617 1617 sha256 = "1i97nwfnraylyn0myxlf3vciicrf5h6fymrcff9c00k581wmx5s1";
1618 1618 };
1619 1619 meta = {
1620 1620 license = [ pkgs.lib.licenses.psfl ];
1621 1621 };
1622 1622 };
1623 1623 "python-memcached" = super.buildPythonPackage {
1624 1624 name = "python-memcached-1.59";
1625 1625 doCheck = false;
1626 1626 propagatedBuildInputs = [
1627 1627 self."six"
1628 1628 ];
1629 1629 src = fetchurl {
1630 1630 url = "https://files.pythonhosted.org/packages/90/59/5faf6e3cd8a568dd4f737ddae4f2e54204fd8c51f90bf8df99aca6c22318/python-memcached-1.59.tar.gz";
1631 1631 sha256 = "0kvyapavbirk2x3n1jx4yb9nyigrj1s3x15nm3qhpvhkpqvqdqm2";
1632 1632 };
1633 1633 meta = {
1634 1634 license = [ pkgs.lib.licenses.psfl ];
1635 1635 };
1636 1636 };
1637 1637 "python-pam" = super.buildPythonPackage {
1638 1638 name = "python-pam-1.8.4";
1639 1639 doCheck = false;
1640 1640 src = fetchurl {
1641 1641 url = "https://files.pythonhosted.org/packages/01/16/544d01cae9f28e0292dbd092b6b8b0bf222b528f362ee768a5bed2140111/python-pam-1.8.4.tar.gz";
1642 1642 sha256 = "16whhc0vr7gxsbzvsnq65nq8fs3wwmx755cavm8kkczdkz4djmn8";
1643 1643 };
1644 1644 meta = {
1645 1645 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1646 1646 };
1647 1647 };
1648 1648 "python-saml" = super.buildPythonPackage {
1649 1649 name = "python-saml-2.4.2";
1650 1650 doCheck = false;
1651 1651 propagatedBuildInputs = [
1652 1652 self."dm.xmlsec.binding"
1653 1653 self."isodate"
1654 1654 self."defusedxml"
1655 1655 ];
1656 1656 src = fetchurl {
1657 1657 url = "https://files.pythonhosted.org/packages/79/a8/a6611017e0883102fd5e2b73c9d90691b8134e38247c04ee1531d3dc647c/python-saml-2.4.2.tar.gz";
1658 1658 sha256 = "0dls4hwvf13yg7x5yfjrghbywg8g38vn5vr0rsf70hli3ydbfm43";
1659 1659 };
1660 1660 meta = {
1661 1661 license = [ pkgs.lib.licenses.mit ];
1662 1662 };
1663 1663 };
1664 1664 "pytz" = super.buildPythonPackage {
1665 1665 name = "pytz-2018.4";
1666 1666 doCheck = false;
1667 1667 src = fetchurl {
1668 1668 url = "https://files.pythonhosted.org/packages/10/76/52efda4ef98e7544321fd8d5d512e11739c1df18b0649551aeccfb1c8376/pytz-2018.4.tar.gz";
1669 1669 sha256 = "0jgpqx3kk2rhv81j1izjxvmx8d0x7hzs1857pgqnixic5wq2ar60";
1670 1670 };
1671 1671 meta = {
1672 1672 license = [ pkgs.lib.licenses.mit ];
1673 1673 };
1674 1674 };
1675 1675 "pyzmq" = super.buildPythonPackage {
1676 1676 name = "pyzmq-14.6.0";
1677 1677 doCheck = false;
1678 1678 src = fetchurl {
1679 1679 url = "https://files.pythonhosted.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1680 1680 sha256 = "1frmbjykvhmdg64g7sn20c9fpamrsfxwci1nhhg8q7jgz5pq0ikp";
1681 1681 };
1682 1682 meta = {
1683 1683 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1684 1684 };
1685 1685 };
1686 1686 "redis" = super.buildPythonPackage {
1687 1687 name = "redis-2.10.6";
1688 1688 doCheck = false;
1689 1689 src = fetchurl {
1690 1690 url = "https://files.pythonhosted.org/packages/09/8d/6d34b75326bf96d4139a2ddd8e74b80840f800a0a79f9294399e212cb9a7/redis-2.10.6.tar.gz";
1691 1691 sha256 = "03vcgklykny0g0wpvqmy8p6azi2s078317wgb2xjv5m2rs9sjb52";
1692 1692 };
1693 1693 meta = {
1694 1694 license = [ pkgs.lib.licenses.mit ];
1695 1695 };
1696 1696 };
1697 1697 "repoze.lru" = super.buildPythonPackage {
1698 1698 name = "repoze.lru-0.7";
1699 1699 doCheck = false;
1700 1700 src = fetchurl {
1701 1701 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
1702 1702 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
1703 1703 };
1704 1704 meta = {
1705 1705 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1706 1706 };
1707 1707 };
1708 1708 "repoze.sendmail" = super.buildPythonPackage {
1709 1709 name = "repoze.sendmail-4.4.1";
1710 1710 doCheck = false;
1711 1711 propagatedBuildInputs = [
1712 1712 self."setuptools"
1713 1713 self."zope.interface"
1714 1714 self."transaction"
1715 1715 ];
1716 1716 src = fetchurl {
1717 1717 url = "https://files.pythonhosted.org/packages/12/4e/8ef1fd5c42765d712427b9c391419a77bd48877886d2cbc5e9f23c8cad9b/repoze.sendmail-4.4.1.tar.gz";
1718 1718 sha256 = "096ln02jr2afk7ab9j2czxqv2ryqq7m86ah572nqplx52iws73ks";
1719 1719 };
1720 1720 meta = {
1721 1721 license = [ pkgs.lib.licenses.zpl21 ];
1722 1722 };
1723 1723 };
1724 1724 "requests" = super.buildPythonPackage {
1725 1725 name = "requests-2.9.1";
1726 1726 doCheck = false;
1727 1727 src = fetchurl {
1728 1728 url = "https://files.pythonhosted.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1729 1729 sha256 = "0zsqrzlybf25xscgi7ja4s48y2abf9wvjkn47wh984qgs1fq2xy5";
1730 1730 };
1731 1731 meta = {
1732 1732 license = [ pkgs.lib.licenses.asl20 ];
1733 1733 };
1734 1734 };
1735 1735 "rhodecode-enterprise-ce" = super.buildPythonPackage {
1736 1736 name = "rhodecode-enterprise-ce-4.17.0";
1737 1737 buildInputs = [
1738 1738 self."pytest"
1739 1739 self."py"
1740 1740 self."pytest-cov"
1741 1741 self."pytest-sugar"
1742 1742 self."pytest-runner"
1743 1743 self."pytest-profiling"
1744 1744 self."pytest-timeout"
1745 1745 self."gprof2dot"
1746 1746 self."mock"
1747 1747 self."cov-core"
1748 1748 self."coverage"
1749 1749 self."webtest"
1750 1750 self."beautifulsoup4"
1751 1751 self."configobj"
1752 1752 ];
1753 1753 doCheck = true;
1754 1754 propagatedBuildInputs = [
1755 1755 self."amqp"
1756 1756 self."authomatic"
1757 1757 self."babel"
1758 1758 self."beaker"
1759 1759 self."bleach"
1760 1760 self."celery"
1761 1761 self."channelstream"
1762 1762 self."click"
1763 1763 self."colander"
1764 1764 self."configobj"
1765 1765 self."cssselect"
1766 1766 self."cryptography"
1767 1767 self."decorator"
1768 1768 self."deform"
1769 1769 self."docutils"
1770 1770 self."dogpile.cache"
1771 1771 self."dogpile.core"
1772 1772 self."formencode"
1773 1773 self."future"
1774 1774 self."futures"
1775 1775 self."infrae.cache"
1776 1776 self."iso8601"
1777 1777 self."itsdangerous"
1778 1778 self."kombu"
1779 1779 self."lxml"
1780 1780 self."mako"
1781 1781 self."markdown"
1782 1782 self."markupsafe"
1783 1783 self."msgpack-python"
1784 1784 self."pyotp"
1785 1785 self."packaging"
1786 1786 self."paste"
1787 1787 self."pastedeploy"
1788 1788 self."pastescript"
1789 1789 self."peppercorn"
1790 1790 self."psutil"
1791 1791 self."py-bcrypt"
1792 1792 self."pycurl"
1793 1793 self."pycrypto"
1794 1794 self."pygments"
1795 1795 self."pyparsing"
1796 1796 self."pyramid-beaker"
1797 1797 self."pyramid-debugtoolbar"
1798 1798 self."pyramid-mako"
1799 1799 self."pyramid"
1800 1800 self."pyramid-mailer"
1801 1801 self."python-dateutil"
1802 1802 self."python-ldap"
1803 1803 self."python-memcached"
1804 1804 self."python-pam"
1805 1805 self."python-saml"
1806 1806 self."pytz"
1807 1807 self."tzlocal"
1808 1808 self."pyzmq"
1809 1809 self."py-gfm"
1810 1810 self."redis"
1811 1811 self."repoze.lru"
1812 1812 self."requests"
1813 1813 self."routes"
1814 1814 self."simplejson"
1815 1815 self."six"
1816 1816 self."sqlalchemy"
1817 1817 self."sshpubkeys"
1818 1818 self."subprocess32"
1819 1819 self."supervisor"
1820 1820 self."translationstring"
1821 1821 self."urllib3"
1822 1822 self."urlobject"
1823 1823 self."venusian"
1824 1824 self."weberror"
1825 1825 self."webhelpers2"
1826 1826 self."webhelpers"
1827 1827 self."webob"
1828 1828 self."whoosh"
1829 1829 self."wsgiref"
1830 1830 self."zope.cachedescriptors"
1831 1831 self."zope.deprecation"
1832 1832 self."zope.event"
1833 1833 self."zope.interface"
1834 1834 self."mysql-python"
1835 1835 self."pymysql"
1836 1836 self."pysqlite"
1837 1837 self."psycopg2"
1838 1838 self."nbconvert"
1839 1839 self."nbformat"
1840 1840 self."jupyter-client"
1841 1841 self."alembic"
1842 1842 self."invoke"
1843 1843 self."bumpversion"
1844 1844 self."gevent"
1845 1845 self."greenlet"
1846 1846 self."gunicorn"
1847 1847 self."waitress"
1848 1848 self."ipdb"
1849 1849 self."ipython"
1850 1850 self."rhodecode-tools"
1851 1851 self."appenlight-client"
1852 1852 self."pytest"
1853 1853 self."py"
1854 1854 self."pytest-cov"
1855 1855 self."pytest-sugar"
1856 1856 self."pytest-runner"
1857 1857 self."pytest-profiling"
1858 1858 self."pytest-timeout"
1859 1859 self."gprof2dot"
1860 1860 self."mock"
1861 1861 self."cov-core"
1862 1862 self."coverage"
1863 1863 self."webtest"
1864 1864 self."beautifulsoup4"
1865 1865 ];
1866 1866 src = ./.;
1867 1867 meta = {
1868 1868 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
1869 1869 };
1870 1870 };
1871 1871 "rhodecode-tools" = super.buildPythonPackage {
1872 1872 name = "rhodecode-tools-1.2.1";
1873 1873 doCheck = false;
1874 1874 propagatedBuildInputs = [
1875 1875 self."click"
1876 1876 self."future"
1877 1877 self."six"
1878 1878 self."mako"
1879 1879 self."markupsafe"
1880 1880 self."requests"
1881 1881 self."urllib3"
1882 1882 self."whoosh"
1883 1883 self."elasticsearch"
1884 1884 self."elasticsearch-dsl"
1885 1885 self."elasticsearch2"
1886 1886 self."elasticsearch1-dsl"
1887 1887 ];
1888 1888 src = fetchurl {
1889 1889 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v1.2.1.tar.gz?md5=25bc2f7de1da318e547236d3fb463d28";
1890 1890 sha256 = "1k8l3s4mvshza1zay6dfxprq54fyb5dc85dqdva9wa3f466y0adk";
1891 1891 };
1892 1892 meta = {
1893 1893 license = [ { fullName = "Apache 2.0 and Proprietary"; } ];
1894 1894 };
1895 1895 };
1896 1896 "routes" = super.buildPythonPackage {
1897 1897 name = "routes-2.4.1";
1898 1898 doCheck = false;
1899 1899 propagatedBuildInputs = [
1900 1900 self."six"
1901 1901 self."repoze.lru"
1902 1902 ];
1903 1903 src = fetchurl {
1904 1904 url = "https://files.pythonhosted.org/packages/33/38/ea827837e68d9c7dde4cff7ec122a93c319f0effc08ce92a17095576603f/Routes-2.4.1.tar.gz";
1905 1905 sha256 = "1zamff3m0kc4vyfniyhxpkkcqv1rrgnmh37ykxv34nna1ws47vi6";
1906 1906 };
1907 1907 meta = {
1908 1908 license = [ pkgs.lib.licenses.mit ];
1909 1909 };
1910 1910 };
1911 1911 "scandir" = super.buildPythonPackage {
1912 1912 name = "scandir-1.10.0";
1913 1913 doCheck = false;
1914 1914 src = fetchurl {
1915 1915 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
1916 1916 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
1917 1917 };
1918 1918 meta = {
1919 1919 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
1920 1920 };
1921 1921 };
1922 1922 "setproctitle" = super.buildPythonPackage {
1923 1923 name = "setproctitle-1.1.10";
1924 1924 doCheck = false;
1925 1925 src = fetchurl {
1926 1926 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
1927 1927 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
1928 1928 };
1929 1929 meta = {
1930 1930 license = [ pkgs.lib.licenses.bsdOriginal ];
1931 1931 };
1932 1932 };
1933 1933 "setuptools" = super.buildPythonPackage {
1934 name = "setuptools-40.8.0";
1934 name = "setuptools-41.0.0";
1935 1935 doCheck = false;
1936 1936 src = fetchurl {
1937 url = "https://files.pythonhosted.org/packages/c2/f7/c7b501b783e5a74cf1768bc174ee4fb0a8a6ee5af6afa92274ff964703e0/setuptools-40.8.0.zip";
1938 sha256 = "0k9hifpgahnw2a26w3cr346iy733k6d3nwh3f7g9m13y6f8fqkkf";
1937 url = "https://files.pythonhosted.org/packages/ed/69/c805067de1feedbb98c53174b0f2df44cc05e0e9ee73bb85eebc59e508c6/setuptools-41.0.0.zip";
1938 sha256 = "1cfwy2g23qj3262ivj0b1182lgwz7bqqbka35rkqwypynra05lvr";
1939 1939 };
1940 1940 meta = {
1941 1941 license = [ pkgs.lib.licenses.mit ];
1942 1942 };
1943 1943 };
1944 1944 "simplegeneric" = super.buildPythonPackage {
1945 1945 name = "simplegeneric-0.8.1";
1946 1946 doCheck = false;
1947 1947 src = fetchurl {
1948 1948 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
1949 1949 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
1950 1950 };
1951 1951 meta = {
1952 1952 license = [ pkgs.lib.licenses.zpl21 ];
1953 1953 };
1954 1954 };
1955 1955 "simplejson" = super.buildPythonPackage {
1956 1956 name = "simplejson-3.16.0";
1957 1957 doCheck = false;
1958 1958 src = fetchurl {
1959 1959 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
1960 1960 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
1961 1961 };
1962 1962 meta = {
1963 1963 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
1964 1964 };
1965 1965 };
1966 1966 "six" = super.buildPythonPackage {
1967 1967 name = "six-1.11.0";
1968 1968 doCheck = false;
1969 1969 src = fetchurl {
1970 1970 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
1971 1971 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
1972 1972 };
1973 1973 meta = {
1974 1974 license = [ pkgs.lib.licenses.mit ];
1975 1975 };
1976 1976 };
1977 1977 "sqlalchemy" = super.buildPythonPackage {
1978 1978 name = "sqlalchemy-1.1.18";
1979 1979 doCheck = false;
1980 1980 src = fetchurl {
1981 1981 url = "https://files.pythonhosted.org/packages/cc/4d/96d93ff77cd67aca7618e402191eee3490d8f5f245d6ab7622d35fe504f4/SQLAlchemy-1.1.18.tar.gz";
1982 1982 sha256 = "1ab4ysip6irajfbxl9wy27kv76miaz8h6759hfx92499z4dcf3lb";
1983 1983 };
1984 1984 meta = {
1985 1985 license = [ pkgs.lib.licenses.mit ];
1986 1986 };
1987 1987 };
1988 1988 "sshpubkeys" = super.buildPythonPackage {
1989 1989 name = "sshpubkeys-3.1.0";
1990 1990 doCheck = false;
1991 1991 propagatedBuildInputs = [
1992 1992 self."cryptography"
1993 1993 self."ecdsa"
1994 1994 ];
1995 1995 src = fetchurl {
1996 1996 url = "https://files.pythonhosted.org/packages/00/23/f7508a12007c96861c3da811992f14283d79c819d71a217b3e12d5196649/sshpubkeys-3.1.0.tar.gz";
1997 1997 sha256 = "105g2li04nm1hb15a2y6hm9m9k7fbrkd5l3gy12w3kgcmsf3k25k";
1998 1998 };
1999 1999 meta = {
2000 2000 license = [ pkgs.lib.licenses.bsdOriginal ];
2001 2001 };
2002 2002 };
2003 2003 "subprocess32" = super.buildPythonPackage {
2004 2004 name = "subprocess32-3.5.3";
2005 2005 doCheck = false;
2006 2006 src = fetchurl {
2007 2007 url = "https://files.pythonhosted.org/packages/be/2b/beeba583e9877e64db10b52a96915afc0feabf7144dcbf2a0d0ea68bf73d/subprocess32-3.5.3.tar.gz";
2008 2008 sha256 = "1hr5fan8i719hmlmz73hf8rhq74014w07d8ryg7krvvf6692kj3b";
2009 2009 };
2010 2010 meta = {
2011 2011 license = [ pkgs.lib.licenses.psfl ];
2012 2012 };
2013 2013 };
2014 2014 "supervisor" = super.buildPythonPackage {
2015 name = "supervisor-3.3.5";
2015 name = "supervisor-4.0.1";
2016 2016 doCheck = false;
2017 2017 propagatedBuildInputs = [
2018 2018 self."meld3"
2019 2019 ];
2020 2020 src = fetchurl {
2021 url = "https://files.pythonhosted.org/packages/ba/65/92575a8757ed576beaee59251f64a3287bde82bdc03964b89df9e1d29e1b/supervisor-3.3.5.tar.gz";
2022 sha256 = "1w3ahridzbc6rxfpbyx8lij6pjlcgf2ymzyg53llkjqxalp6sk8v";
2021 url = "https://files.pythonhosted.org/packages/96/ec/f8190beeb0c6d29a30aea10389c11d0164b6ff221931ee84093315ecde6a/supervisor-4.0.1.tar.gz";
2022 sha256 = "10l3z7v6v1fyv7m5zbazzxciwvli2n9a41pxi27p4kixgsfp0s1j";
2023 2023 };
2024 2024 meta = {
2025 2025 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2026 2026 };
2027 2027 };
2028 2028 "tempita" = super.buildPythonPackage {
2029 2029 name = "tempita-0.5.2";
2030 2030 doCheck = false;
2031 2031 src = fetchurl {
2032 2032 url = "https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
2033 2033 sha256 = "177wwq45slfyajd8csy477bmdmzipyw0dm7i85k3akb7m85wzkna";
2034 2034 };
2035 2035 meta = {
2036 2036 license = [ pkgs.lib.licenses.mit ];
2037 2037 };
2038 2038 };
2039 2039 "termcolor" = super.buildPythonPackage {
2040 2040 name = "termcolor-1.1.0";
2041 2041 doCheck = false;
2042 2042 src = fetchurl {
2043 2043 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
2044 2044 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
2045 2045 };
2046 2046 meta = {
2047 2047 license = [ pkgs.lib.licenses.mit ];
2048 2048 };
2049 2049 };
2050 2050 "testpath" = super.buildPythonPackage {
2051 2051 name = "testpath-0.4.2";
2052 2052 doCheck = false;
2053 2053 src = fetchurl {
2054 2054 url = "https://files.pythonhosted.org/packages/06/30/9a7e917066d851d8b4117e85794b5f14516419ea714a8a2681ec6aa8a981/testpath-0.4.2.tar.gz";
2055 2055 sha256 = "1y40hywscnnyb734pnzm55nd8r8kp1072bjxbil83gcd53cv755n";
2056 2056 };
2057 2057 meta = {
2058 2058 license = [ ];
2059 2059 };
2060 2060 };
2061 2061 "traitlets" = super.buildPythonPackage {
2062 2062 name = "traitlets-4.3.2";
2063 2063 doCheck = false;
2064 2064 propagatedBuildInputs = [
2065 2065 self."ipython-genutils"
2066 2066 self."six"
2067 2067 self."decorator"
2068 2068 self."enum34"
2069 2069 ];
2070 2070 src = fetchurl {
2071 2071 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
2072 2072 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
2073 2073 };
2074 2074 meta = {
2075 2075 license = [ pkgs.lib.licenses.bsdOriginal ];
2076 2076 };
2077 2077 };
2078 2078 "transaction" = super.buildPythonPackage {
2079 2079 name = "transaction-2.4.0";
2080 2080 doCheck = false;
2081 2081 propagatedBuildInputs = [
2082 2082 self."zope.interface"
2083 2083 ];
2084 2084 src = fetchurl {
2085 2085 url = "https://files.pythonhosted.org/packages/9d/7d/0e8af0d059e052b9dcf2bb5a08aad20ae3e238746bdd3f8701a60969b363/transaction-2.4.0.tar.gz";
2086 2086 sha256 = "17wz1y524ca07vr03yddy8dv0gbscs06dbdywmllxv5rc725jq3j";
2087 2087 };
2088 2088 meta = {
2089 2089 license = [ pkgs.lib.licenses.zpl21 ];
2090 2090 };
2091 2091 };
2092 2092 "translationstring" = super.buildPythonPackage {
2093 2093 name = "translationstring-1.3";
2094 2094 doCheck = false;
2095 2095 src = fetchurl {
2096 2096 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
2097 2097 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
2098 2098 };
2099 2099 meta = {
2100 2100 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
2101 2101 };
2102 2102 };
2103 2103 "tzlocal" = super.buildPythonPackage {
2104 2104 name = "tzlocal-1.5.1";
2105 2105 doCheck = false;
2106 2106 propagatedBuildInputs = [
2107 2107 self."pytz"
2108 2108 ];
2109 2109 src = fetchurl {
2110 2110 url = "https://files.pythonhosted.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz";
2111 2111 sha256 = "0kiciwiqx0bv0fbc913idxibc4ygg4cb7f8rcpd9ij2shi4bigjf";
2112 2112 };
2113 2113 meta = {
2114 2114 license = [ pkgs.lib.licenses.mit ];
2115 2115 };
2116 2116 };
2117 2117 "urllib3" = super.buildPythonPackage {
2118 2118 name = "urllib3-1.24.1";
2119 2119 doCheck = false;
2120 2120 src = fetchurl {
2121 2121 url = "https://files.pythonhosted.org/packages/b1/53/37d82ab391393565f2f831b8eedbffd57db5a718216f82f1a8b4d381a1c1/urllib3-1.24.1.tar.gz";
2122 2122 sha256 = "08lwd9f3hqznyf32vnzwvp87pchx062nkbgyrf67rwlkgj0jk5fy";
2123 2123 };
2124 2124 meta = {
2125 2125 license = [ pkgs.lib.licenses.mit ];
2126 2126 };
2127 2127 };
2128 2128 "urlobject" = super.buildPythonPackage {
2129 2129 name = "urlobject-2.4.3";
2130 2130 doCheck = false;
2131 2131 src = fetchurl {
2132 2132 url = "https://files.pythonhosted.org/packages/e2/b8/1d0a916f4b34c4618846e6da0e4eeaa8fcb4a2f39e006434fe38acb74b34/URLObject-2.4.3.tar.gz";
2133 2133 sha256 = "1ahc8ficzfvr2avln71immfh4ls0zyv6cdaa5xmkdj5rd87f5cj7";
2134 2134 };
2135 2135 meta = {
2136 2136 license = [ pkgs.lib.licenses.publicDomain ];
2137 2137 };
2138 2138 };
2139 2139 "venusian" = super.buildPythonPackage {
2140 2140 name = "venusian-1.2.0";
2141 2141 doCheck = false;
2142 2142 src = fetchurl {
2143 2143 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
2144 2144 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
2145 2145 };
2146 2146 meta = {
2147 2147 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2148 2148 };
2149 2149 };
2150 2150 "vine" = super.buildPythonPackage {
2151 2151 name = "vine-1.3.0";
2152 2152 doCheck = false;
2153 2153 src = fetchurl {
2154 2154 url = "https://files.pythonhosted.org/packages/1c/e1/79fb8046e607dd6c2ad05c9b8ebac9d0bd31d086a08f02699e96fc5b3046/vine-1.3.0.tar.gz";
2155 2155 sha256 = "11ydsbhl1vabndc2r979dv61s6j2b0giq6dgvryifvq1m7bycghk";
2156 2156 };
2157 2157 meta = {
2158 2158 license = [ pkgs.lib.licenses.bsdOriginal ];
2159 2159 };
2160 2160 };
2161 2161 "waitress" = super.buildPythonPackage {
2162 2162 name = "waitress-1.1.0";
2163 2163 doCheck = false;
2164 2164 src = fetchurl {
2165 2165 url = "https://files.pythonhosted.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
2166 2166 sha256 = "1a85gyji0kajc3p0s1pwwfm06w4wfxjkvvl4rnrz3h164kbd6g6k";
2167 2167 };
2168 2168 meta = {
2169 2169 license = [ pkgs.lib.licenses.zpl21 ];
2170 2170 };
2171 2171 };
2172 2172 "wcwidth" = super.buildPythonPackage {
2173 2173 name = "wcwidth-0.1.7";
2174 2174 doCheck = false;
2175 2175 src = fetchurl {
2176 2176 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
2177 2177 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
2178 2178 };
2179 2179 meta = {
2180 2180 license = [ pkgs.lib.licenses.mit ];
2181 2181 };
2182 2182 };
2183 2183 "webencodings" = super.buildPythonPackage {
2184 2184 name = "webencodings-0.5.1";
2185 2185 doCheck = false;
2186 2186 src = fetchurl {
2187 2187 url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz";
2188 2188 sha256 = "08qrgrc4hrximb2gqnl69g01s93rhf2842jfxdjljc1dbwj1qsmk";
2189 2189 };
2190 2190 meta = {
2191 2191 license = [ pkgs.lib.licenses.bsdOriginal ];
2192 2192 };
2193 2193 };
2194 2194 "weberror" = super.buildPythonPackage {
2195 2195 name = "weberror-0.10.3";
2196 2196 doCheck = false;
2197 2197 propagatedBuildInputs = [
2198 2198 self."webob"
2199 2199 self."tempita"
2200 2200 self."pygments"
2201 2201 self."paste"
2202 2202 ];
2203 2203 src = fetchurl {
2204 2204 url = "https://files.pythonhosted.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
2205 2205 sha256 = "0frg4kvycqpj5bi8asfqfs6bxsr2cvjvb6b56c4d1ai1z57kbjx6";
2206 2206 };
2207 2207 meta = {
2208 2208 license = [ pkgs.lib.licenses.mit ];
2209 2209 };
2210 2210 };
2211 2211 "webhelpers" = super.buildPythonPackage {
2212 2212 name = "webhelpers-1.3";
2213 2213 doCheck = false;
2214 2214 propagatedBuildInputs = [
2215 2215 self."markupsafe"
2216 2216 ];
2217 2217 src = fetchurl {
2218 2218 url = "https://files.pythonhosted.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
2219 2219 sha256 = "10x5i82qdkrvyw18gsybwggfhfpl869siaab89vnndi9x62g51pa";
2220 2220 };
2221 2221 meta = {
2222 2222 license = [ pkgs.lib.licenses.bsdOriginal ];
2223 2223 };
2224 2224 };
2225 2225 "webhelpers2" = super.buildPythonPackage {
2226 2226 name = "webhelpers2-2.0";
2227 2227 doCheck = false;
2228 2228 propagatedBuildInputs = [
2229 2229 self."markupsafe"
2230 2230 self."six"
2231 2231 ];
2232 2232 src = fetchurl {
2233 2233 url = "https://files.pythonhosted.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
2234 2234 sha256 = "0aphva1qmxh83n01p53f5fd43m4srzbnfbz5ajvbx9aj2aipwmcs";
2235 2235 };
2236 2236 meta = {
2237 2237 license = [ pkgs.lib.licenses.mit ];
2238 2238 };
2239 2239 };
2240 2240 "webob" = super.buildPythonPackage {
2241 2241 name = "webob-1.8.5";
2242 2242 doCheck = false;
2243 2243 src = fetchurl {
2244 2244 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
2245 2245 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
2246 2246 };
2247 2247 meta = {
2248 2248 license = [ pkgs.lib.licenses.mit ];
2249 2249 };
2250 2250 };
2251 2251 "webtest" = super.buildPythonPackage {
2252 2252 name = "webtest-2.0.33";
2253 2253 doCheck = false;
2254 2254 propagatedBuildInputs = [
2255 2255 self."six"
2256 2256 self."webob"
2257 2257 self."waitress"
2258 2258 self."beautifulsoup4"
2259 2259 ];
2260 2260 src = fetchurl {
2261 2261 url = "https://files.pythonhosted.org/packages/a8/b0/ffc9413b637dbe26e291429bb0f6ed731e518d0cd03da28524a8fe2e8a8f/WebTest-2.0.33.tar.gz";
2262 2262 sha256 = "1l3z0cwqslsf4rcrhi2gr8kdfh74wn2dw76376i4g9i38gz8wd21";
2263 2263 };
2264 2264 meta = {
2265 2265 license = [ pkgs.lib.licenses.mit ];
2266 2266 };
2267 2267 };
2268 2268 "whoosh" = super.buildPythonPackage {
2269 2269 name = "whoosh-2.7.4";
2270 2270 doCheck = false;
2271 2271 src = fetchurl {
2272 2272 url = "https://files.pythonhosted.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
2273 2273 sha256 = "10qsqdjpbc85fykc1vgcs8xwbgn4l2l52c8d83xf1q59pwyn79bw";
2274 2274 };
2275 2275 meta = {
2276 2276 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
2277 2277 };
2278 2278 };
2279 2279 "ws4py" = super.buildPythonPackage {
2280 2280 name = "ws4py-0.5.1";
2281 2281 doCheck = false;
2282 2282 src = fetchurl {
2283 2283 url = "https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz";
2284 2284 sha256 = "10slbbf2jm4hpr92jx7kh7mhf48sjl01v2w4d8z3f1p0ybbp7l19";
2285 2285 };
2286 2286 meta = {
2287 2287 license = [ pkgs.lib.licenses.bsdOriginal ];
2288 2288 };
2289 2289 };
2290 2290 "wsgiref" = super.buildPythonPackage {
2291 2291 name = "wsgiref-0.1.2";
2292 2292 doCheck = false;
2293 2293 src = fetchurl {
2294 2294 url = "https://files.pythonhosted.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2295 2295 sha256 = "0y8fyjmpq7vwwm4x732w97qbkw78rjwal5409k04cw4m03411rn7";
2296 2296 };
2297 2297 meta = {
2298 2298 license = [ { fullName = "PSF or ZPL"; } ];
2299 2299 };
2300 2300 };
2301 2301 "zope.cachedescriptors" = super.buildPythonPackage {
2302 2302 name = "zope.cachedescriptors-4.3.1";
2303 2303 doCheck = false;
2304 2304 propagatedBuildInputs = [
2305 2305 self."setuptools"
2306 2306 ];
2307 2307 src = fetchurl {
2308 2308 url = "https://files.pythonhosted.org/packages/2f/89/ebe1890cc6d3291ebc935558fa764d5fffe571018dbbee200e9db78762cb/zope.cachedescriptors-4.3.1.tar.gz";
2309 2309 sha256 = "0jhr3m5p74c6r7k8iv0005b8bfsialih9d7zl5vx38rf5xq1lk8z";
2310 2310 };
2311 2311 meta = {
2312 2312 license = [ pkgs.lib.licenses.zpl21 ];
2313 2313 };
2314 2314 };
2315 2315 "zope.deprecation" = super.buildPythonPackage {
2316 2316 name = "zope.deprecation-4.4.0";
2317 2317 doCheck = false;
2318 2318 propagatedBuildInputs = [
2319 2319 self."setuptools"
2320 2320 ];
2321 2321 src = fetchurl {
2322 2322 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
2323 2323 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
2324 2324 };
2325 2325 meta = {
2326 2326 license = [ pkgs.lib.licenses.zpl21 ];
2327 2327 };
2328 2328 };
2329 2329 "zope.event" = super.buildPythonPackage {
2330 2330 name = "zope.event-4.4";
2331 2331 doCheck = false;
2332 2332 propagatedBuildInputs = [
2333 2333 self."setuptools"
2334 2334 ];
2335 2335 src = fetchurl {
2336 2336 url = "https://files.pythonhosted.org/packages/4c/b2/51c0369adcf5be2334280eed230192ab3b03f81f8efda9ddea6f65cc7b32/zope.event-4.4.tar.gz";
2337 2337 sha256 = "1ksbc726av9xacml6jhcfyn828hlhb9xlddpx6fcvnlvmpmpvhk9";
2338 2338 };
2339 2339 meta = {
2340 2340 license = [ pkgs.lib.licenses.zpl21 ];
2341 2341 };
2342 2342 };
2343 2343 "zope.interface" = super.buildPythonPackage {
2344 2344 name = "zope.interface-4.6.0";
2345 2345 doCheck = false;
2346 2346 propagatedBuildInputs = [
2347 2347 self."setuptools"
2348 2348 ];
2349 2349 src = fetchurl {
2350 2350 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
2351 2351 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
2352 2352 };
2353 2353 meta = {
2354 2354 license = [ pkgs.lib.licenses.zpl21 ];
2355 2355 };
2356 2356 };
2357 2357
2358 2358 ### Test requirements
2359 2359
2360 2360
2361 2361 }
@@ -1,124 +1,124 b''
1 1 ## dependencies
2 2
3 3 amqp==2.3.1
4 4 # not released authomatic that has updated some oauth providers
5 5 https://code.rhodecode.com/upstream/authomatic/archive/90a9ce60cc405ae8a2bf5c3713acd5d78579a04e.tar.gz?md5=3c68720a1322b25254009518d1ff6801#egg=authomatic==0.1.0.post1
6 6
7 7 babel==1.3
8 8 beaker==1.9.1
9 9 bleach==3.1.0
10 10 celery==4.1.1
11 11 channelstream==0.5.2
12 12 click==7.0
13 13 colander==1.7.0
14 14 # our custom configobj
15 15 https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c#egg=configobj==5.0.6
16 16 cssselect==1.0.3
17 17 cryptography==2.6.1
18 18 decorator==4.1.2
19 19 deform==2.0.7
20 20 docutils==0.14.0
21 21 dogpile.cache==0.7.1
22 22 dogpile.core==0.4.1
23 23 formencode==1.2.4
24 24 future==0.14.3
25 25 futures==3.0.2
26 26 infrae.cache==1.0.1
27 27 iso8601==0.1.12
28 28 itsdangerous==0.24
29 29 kombu==4.2.1
30 30 lxml==4.2.5
31 31 mako==1.0.7
32 32 markdown==2.6.11
33 33 markupsafe==1.1.0
34 34 msgpack-python==0.5.6
35 35 pyotp==2.2.7
36 36 packaging==15.2
37 37 paste==3.0.8
38 38 pastedeploy==2.0.1
39 39 pastescript==3.1.0
40 40 peppercorn==0.6
41 41 psutil==5.5.1
42 42 py-bcrypt==0.4
43 43 pycurl==7.43.0.2
44 44 pycrypto==2.6.1
45 45 pygments==2.3.1
46 46 pyparsing==2.3.0
47 47 pyramid-beaker==0.8
48 48 pyramid-debugtoolbar==4.5.0
49 49 pyramid-mako==1.0.2
50 pyramid==1.10.2
50 pyramid==1.10.4
51 51 pyramid_mailer==0.15.1
52 52 python-dateutil
53 53 python-ldap==3.1.0
54 54 python-memcached==1.59
55 55 python-pam==1.8.4
56 56 python-saml==2.4.2
57 57 pytz==2018.4
58 58 tzlocal==1.5.1
59 59 pyzmq==14.6.0
60 60 py-gfm==0.1.4
61 61 redis==2.10.6
62 62 repoze.lru==0.7
63 63 requests==2.9.1
64 64 routes==2.4.1
65 65 simplejson==3.16.0
66 66 six==1.11.0
67 67 sqlalchemy==1.1.18
68 68 sshpubkeys==3.1.0
69 69 subprocess32==3.5.3
70 supervisor==3.3.5
70 supervisor==4.0.1
71 71 translationstring==1.3
72 72 urllib3==1.24.1
73 73 urlobject==2.4.3
74 74 venusian==1.2.0
75 75 weberror==0.10.3
76 76 webhelpers2==2.0
77 77 webhelpers==1.3
78 78 webob==1.8.5
79 79 whoosh==2.7.4
80 80 wsgiref==0.1.2
81 81 zope.cachedescriptors==4.3.1
82 82 zope.deprecation==4.4.0
83 83 zope.event==4.4.0
84 84 zope.interface==4.6.0
85 85
86 86 # DB drivers
87 87 mysql-python==1.2.5
88 88 pymysql==0.8.1
89 89 pysqlite==2.8.3
90 psycopg2==2.7.7
90 psycopg2==2.8.2
91 91
92 92 # IPYTHON RENDERING
93 93 # entrypoints backport, pypi version doesn't support egg installs
94 94 https://code.rhodecode.com/upstream/entrypoints/archive/96e6d645684e1af3d7df5b5272f3fe85a546b233.tar.gz?md5=7db37771aea9ac9fefe093e5d6987313#egg=entrypoints==0.2.2.rhodecode-upstream1
95 95 nbconvert==5.3.1
96 96 nbformat==4.4.0
97 97 jupyter_client==5.0.0
98 98
99 99 ## cli tools
100 alembic==1.0.8
100 alembic==1.0.9
101 101 invoke==0.13.0
102 102 bumpversion==0.5.3
103 103
104 104 ## http servers
105 105 gevent==1.4.0
106 106 greenlet==0.4.15
107 107 gunicorn==19.9.0
108 108 waitress==1.1.0
109 109
110 110 ## debug
111 111 ipdb==0.12.0
112 112 ipython==5.1.0
113 113
114 114 ## rhodecode-tools, special case
115 115 https://code.rhodecode.com/rhodecode-tools-ce/archive/v1.2.1.tar.gz?md5=25bc2f7de1da318e547236d3fb463d28#egg=rhodecode-tools==1.2.1
116 116
117 117 ## appenlight
118 118 appenlight-client==0.6.26
119 119
120 120 ## test related requirements
121 121 -r requirements_test.txt
122 122
123 123 ## uncomment to add the debug libraries
124 124 #-r requirements_debug.txt
@@ -1,16 +1,16 b''
1 1 # test related requirements
2 2 pytest==3.8.2
3 3 py==1.6.0
4 4 pytest-cov==2.6.0
5 5 pytest-sugar==0.9.1
6 6 pytest-runner==4.2.0
7 7 pytest-profiling==1.3.0
8 8 pytest-timeout==1.3.2
9 9 gprof2dot==2017.9.19
10 10
11 11 mock==1.0.1
12 12 cov-core==1.15.0
13 coverage==4.5.1
13 coverage==4.5.3
14 14
15 15 webtest==2.0.33
16 16 beautifulsoup4==4.6.3
@@ -1,57 +1,57 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import sys
23 23 import platform
24 24
25 25 VERSION = tuple(open(os.path.join(
26 26 os.path.dirname(__file__), 'VERSION')).read().split('.'))
27 27
28 28 BACKENDS = {
29 29 'hg': 'Mercurial repository',
30 30 'git': 'Git repository',
31 31 'svn': 'Subversion repository',
32 32 }
33 33
34 34 CELERY_ENABLED = False
35 35 CELERY_EAGER = False
36 36
37 37 # link to config for pyramid
38 38 CONFIG = {}
39 39
40 40 # Populated with the settings dictionary from application init in
41 41 # rhodecode.conf.environment.load_pyramid_environment
42 42 PYRAMID_SETTINGS = {}
43 43
44 44 # Linked module for extensions
45 45 EXTENSIONS = {}
46 46
47 47 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
48 __dbversion__ = 95 # defines current db version for migrations
48 __dbversion__ = 97 # defines current db version for migrations
49 49 __platform__ = platform.system()
50 50 __license__ = 'AGPLv3, and Commercial License'
51 51 __author__ = 'RhodeCode GmbH'
52 52 __url__ = 'https://code.rhodecode.com'
53 53
54 54 is_windows = __platform__ in ['Windows']
55 55 is_unix = not is_windows
56 56 is_test = False
57 57 disable_error_handler = False
@@ -1,446 +1,450 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 from rhodecode.apps._base import ADMIN_PREFIX
23 23
24 24
25 25 def admin_routes(config):
26 26 """
27 27 Admin prefixed routes
28 28 """
29 29
30 30 config.add_route(
31 31 name='admin_audit_logs',
32 32 pattern='/audit_logs')
33 33
34 34 config.add_route(
35 35 name='admin_audit_log_entry',
36 36 pattern='/audit_logs/{audit_log_id}')
37 37
38 38 config.add_route(
39 39 name='pull_requests_global_0', # backward compat
40 40 pattern='/pull_requests/{pull_request_id:\d+}')
41 41 config.add_route(
42 42 name='pull_requests_global_1', # backward compat
43 43 pattern='/pull-requests/{pull_request_id:\d+}')
44 44 config.add_route(
45 45 name='pull_requests_global',
46 46 pattern='/pull-request/{pull_request_id:\d+}')
47 47
48 48 config.add_route(
49 49 name='admin_settings_open_source',
50 50 pattern='/settings/open_source')
51 51 config.add_route(
52 52 name='admin_settings_vcs_svn_generate_cfg',
53 53 pattern='/settings/vcs/svn_generate_cfg')
54 54
55 55 config.add_route(
56 56 name='admin_settings_system',
57 57 pattern='/settings/system')
58 58 config.add_route(
59 59 name='admin_settings_system_update',
60 60 pattern='/settings/system/updates')
61 61
62 62 config.add_route(
63 63 name='admin_settings_exception_tracker',
64 64 pattern='/settings/exceptions')
65 65 config.add_route(
66 66 name='admin_settings_exception_tracker_delete_all',
67 67 pattern='/settings/exceptions/delete')
68 68 config.add_route(
69 69 name='admin_settings_exception_tracker_show',
70 70 pattern='/settings/exceptions/{exception_id}')
71 71 config.add_route(
72 72 name='admin_settings_exception_tracker_delete',
73 73 pattern='/settings/exceptions/{exception_id}/delete')
74 74
75 75 config.add_route(
76 76 name='admin_settings_sessions',
77 77 pattern='/settings/sessions')
78 78 config.add_route(
79 79 name='admin_settings_sessions_cleanup',
80 80 pattern='/settings/sessions/cleanup')
81 81
82 82 config.add_route(
83 83 name='admin_settings_process_management',
84 84 pattern='/settings/process_management')
85 85 config.add_route(
86 86 name='admin_settings_process_management_data',
87 87 pattern='/settings/process_management/data')
88 88 config.add_route(
89 89 name='admin_settings_process_management_signal',
90 90 pattern='/settings/process_management/signal')
91 91 config.add_route(
92 92 name='admin_settings_process_management_master_signal',
93 93 pattern='/settings/process_management/master_signal')
94 94
95 95 # default settings
96 96 config.add_route(
97 97 name='admin_defaults_repositories',
98 98 pattern='/defaults/repositories')
99 99 config.add_route(
100 100 name='admin_defaults_repositories_update',
101 101 pattern='/defaults/repositories/update')
102 102
103 103 # admin settings
104 104
105 105 config.add_route(
106 106 name='admin_settings',
107 107 pattern='/settings')
108 108 config.add_route(
109 109 name='admin_settings_update',
110 110 pattern='/settings/update')
111 111
112 112 config.add_route(
113 113 name='admin_settings_global',
114 114 pattern='/settings/global')
115 115 config.add_route(
116 116 name='admin_settings_global_update',
117 117 pattern='/settings/global/update')
118 118
119 119 config.add_route(
120 120 name='admin_settings_vcs',
121 121 pattern='/settings/vcs')
122 122 config.add_route(
123 123 name='admin_settings_vcs_update',
124 124 pattern='/settings/vcs/update')
125 125 config.add_route(
126 126 name='admin_settings_vcs_svn_pattern_delete',
127 127 pattern='/settings/vcs/svn_pattern_delete')
128 128
129 129 config.add_route(
130 130 name='admin_settings_mapping',
131 131 pattern='/settings/mapping')
132 132 config.add_route(
133 133 name='admin_settings_mapping_update',
134 134 pattern='/settings/mapping/update')
135 135
136 136 config.add_route(
137 137 name='admin_settings_visual',
138 138 pattern='/settings/visual')
139 139 config.add_route(
140 140 name='admin_settings_visual_update',
141 141 pattern='/settings/visual/update')
142 142
143 143
144 144 config.add_route(
145 145 name='admin_settings_issuetracker',
146 146 pattern='/settings/issue-tracker')
147 147 config.add_route(
148 148 name='admin_settings_issuetracker_update',
149 149 pattern='/settings/issue-tracker/update')
150 150 config.add_route(
151 151 name='admin_settings_issuetracker_test',
152 152 pattern='/settings/issue-tracker/test')
153 153 config.add_route(
154 154 name='admin_settings_issuetracker_delete',
155 155 pattern='/settings/issue-tracker/delete')
156 156
157 157 config.add_route(
158 158 name='admin_settings_email',
159 159 pattern='/settings/email')
160 160 config.add_route(
161 161 name='admin_settings_email_update',
162 162 pattern='/settings/email/update')
163 163
164 164 config.add_route(
165 165 name='admin_settings_hooks',
166 166 pattern='/settings/hooks')
167 167 config.add_route(
168 168 name='admin_settings_hooks_update',
169 169 pattern='/settings/hooks/update')
170 170 config.add_route(
171 171 name='admin_settings_hooks_delete',
172 172 pattern='/settings/hooks/delete')
173 173
174 174 config.add_route(
175 175 name='admin_settings_search',
176 176 pattern='/settings/search')
177 177
178 178 config.add_route(
179 179 name='admin_settings_labs',
180 180 pattern='/settings/labs')
181 181 config.add_route(
182 182 name='admin_settings_labs_update',
183 183 pattern='/settings/labs/update')
184 184
185 185 # Automation EE feature
186 186 config.add_route(
187 187 'admin_settings_automation',
188 188 pattern=ADMIN_PREFIX + '/settings/automation')
189 189
190 190 # global permissions
191 191
192 192 config.add_route(
193 193 name='admin_permissions_application',
194 194 pattern='/permissions/application')
195 195 config.add_route(
196 196 name='admin_permissions_application_update',
197 197 pattern='/permissions/application/update')
198 198
199 199 config.add_route(
200 200 name='admin_permissions_global',
201 201 pattern='/permissions/global')
202 202 config.add_route(
203 203 name='admin_permissions_global_update',
204 204 pattern='/permissions/global/update')
205 205
206 206 config.add_route(
207 207 name='admin_permissions_object',
208 208 pattern='/permissions/object')
209 209 config.add_route(
210 210 name='admin_permissions_object_update',
211 211 pattern='/permissions/object/update')
212 212
213 213 # Branch perms EE feature
214 214 config.add_route(
215 215 name='admin_permissions_branch',
216 216 pattern='/permissions/branch')
217 217
218 218 config.add_route(
219 219 name='admin_permissions_ips',
220 220 pattern='/permissions/ips')
221 221
222 222 config.add_route(
223 223 name='admin_permissions_overview',
224 224 pattern='/permissions/overview')
225 225
226 226 config.add_route(
227 227 name='admin_permissions_auth_token_access',
228 228 pattern='/permissions/auth_token_access')
229 229
230 230 config.add_route(
231 231 name='admin_permissions_ssh_keys',
232 232 pattern='/permissions/ssh_keys')
233 233 config.add_route(
234 234 name='admin_permissions_ssh_keys_data',
235 235 pattern='/permissions/ssh_keys/data')
236 236 config.add_route(
237 237 name='admin_permissions_ssh_keys_update',
238 238 pattern='/permissions/ssh_keys/update')
239 239
240 240 # users admin
241 241 config.add_route(
242 242 name='users',
243 243 pattern='/users')
244 244
245 245 config.add_route(
246 246 name='users_data',
247 247 pattern='/users_data')
248 248
249 249 config.add_route(
250 250 name='users_create',
251 251 pattern='/users/create')
252 252
253 253 config.add_route(
254 254 name='users_new',
255 255 pattern='/users/new')
256 256
257 257 # user management
258 258 config.add_route(
259 259 name='user_edit',
260 260 pattern='/users/{user_id:\d+}/edit',
261 261 user_route=True)
262 262 config.add_route(
263 263 name='user_edit_advanced',
264 264 pattern='/users/{user_id:\d+}/edit/advanced',
265 265 user_route=True)
266 266 config.add_route(
267 267 name='user_edit_global_perms',
268 268 pattern='/users/{user_id:\d+}/edit/global_permissions',
269 269 user_route=True)
270 270 config.add_route(
271 271 name='user_edit_global_perms_update',
272 272 pattern='/users/{user_id:\d+}/edit/global_permissions/update',
273 273 user_route=True)
274 274 config.add_route(
275 275 name='user_update',
276 276 pattern='/users/{user_id:\d+}/update',
277 277 user_route=True)
278 278 config.add_route(
279 279 name='user_delete',
280 280 pattern='/users/{user_id:\d+}/delete',
281 281 user_route=True)
282 282 config.add_route(
283 283 name='user_enable_force_password_reset',
284 284 pattern='/users/{user_id:\d+}/password_reset_enable',
285 285 user_route=True)
286 286 config.add_route(
287 287 name='user_disable_force_password_reset',
288 288 pattern='/users/{user_id:\d+}/password_reset_disable',
289 289 user_route=True)
290 290 config.add_route(
291 291 name='user_create_personal_repo_group',
292 292 pattern='/users/{user_id:\d+}/create_repo_group',
293 293 user_route=True)
294 294
295 295 # user auth tokens
296 296 config.add_route(
297 297 name='edit_user_auth_tokens',
298 298 pattern='/users/{user_id:\d+}/edit/auth_tokens',
299 299 user_route=True)
300 300 config.add_route(
301 301 name='edit_user_auth_tokens_add',
302 302 pattern='/users/{user_id:\d+}/edit/auth_tokens/new',
303 303 user_route=True)
304 304 config.add_route(
305 305 name='edit_user_auth_tokens_delete',
306 306 pattern='/users/{user_id:\d+}/edit/auth_tokens/delete',
307 307 user_route=True)
308 308
309 309 # user ssh keys
310 310 config.add_route(
311 311 name='edit_user_ssh_keys',
312 312 pattern='/users/{user_id:\d+}/edit/ssh_keys',
313 313 user_route=True)
314 314 config.add_route(
315 315 name='edit_user_ssh_keys_generate_keypair',
316 316 pattern='/users/{user_id:\d+}/edit/ssh_keys/generate',
317 317 user_route=True)
318 318 config.add_route(
319 319 name='edit_user_ssh_keys_add',
320 320 pattern='/users/{user_id:\d+}/edit/ssh_keys/new',
321 321 user_route=True)
322 322 config.add_route(
323 323 name='edit_user_ssh_keys_delete',
324 324 pattern='/users/{user_id:\d+}/edit/ssh_keys/delete',
325 325 user_route=True)
326 326
327 327 # user emails
328 328 config.add_route(
329 329 name='edit_user_emails',
330 330 pattern='/users/{user_id:\d+}/edit/emails',
331 331 user_route=True)
332 332 config.add_route(
333 333 name='edit_user_emails_add',
334 334 pattern='/users/{user_id:\d+}/edit/emails/new',
335 335 user_route=True)
336 336 config.add_route(
337 337 name='edit_user_emails_delete',
338 338 pattern='/users/{user_id:\d+}/edit/emails/delete',
339 339 user_route=True)
340 340
341 341 # user IPs
342 342 config.add_route(
343 343 name='edit_user_ips',
344 344 pattern='/users/{user_id:\d+}/edit/ips',
345 345 user_route=True)
346 346 config.add_route(
347 347 name='edit_user_ips_add',
348 348 pattern='/users/{user_id:\d+}/edit/ips/new',
349 349 user_route_with_default=True) # enabled for default user too
350 350 config.add_route(
351 351 name='edit_user_ips_delete',
352 352 pattern='/users/{user_id:\d+}/edit/ips/delete',
353 353 user_route_with_default=True) # enabled for default user too
354 354
355 355 # user perms
356 356 config.add_route(
357 357 name='edit_user_perms_summary',
358 358 pattern='/users/{user_id:\d+}/edit/permissions_summary',
359 359 user_route=True)
360 360 config.add_route(
361 361 name='edit_user_perms_summary_json',
362 362 pattern='/users/{user_id:\d+}/edit/permissions_summary/json',
363 363 user_route=True)
364 364
365 365 # user user groups management
366 366 config.add_route(
367 367 name='edit_user_groups_management',
368 368 pattern='/users/{user_id:\d+}/edit/groups_management',
369 369 user_route=True)
370 370
371 371 config.add_route(
372 372 name='edit_user_groups_management_updates',
373 373 pattern='/users/{user_id:\d+}/edit/edit_user_groups_management/updates',
374 374 user_route=True)
375 375
376 376 # user audit logs
377 377 config.add_route(
378 378 name='edit_user_audit_logs',
379 379 pattern='/users/{user_id:\d+}/edit/audit', user_route=True)
380 380
381 381 # user caches
382 382 config.add_route(
383 383 name='edit_user_caches',
384 384 pattern='/users/{user_id:\d+}/edit/caches',
385 385 user_route=True)
386 386 config.add_route(
387 387 name='edit_user_caches_update',
388 388 pattern='/users/{user_id:\d+}/edit/caches/update',
389 389 user_route=True)
390 390
391 391 # user-groups admin
392 392 config.add_route(
393 393 name='user_groups',
394 394 pattern='/user_groups')
395 395
396 396 config.add_route(
397 397 name='user_groups_data',
398 398 pattern='/user_groups_data')
399 399
400 400 config.add_route(
401 401 name='user_groups_new',
402 402 pattern='/user_groups/new')
403 403
404 404 config.add_route(
405 405 name='user_groups_create',
406 406 pattern='/user_groups/create')
407 407
408 408 # repos admin
409 409 config.add_route(
410 410 name='repos',
411 411 pattern='/repos')
412 412
413 413 config.add_route(
414 414 name='repo_new',
415 415 pattern='/repos/new')
416 416
417 417 config.add_route(
418 418 name='repo_create',
419 419 pattern='/repos/create')
420 420
421 421 # repo groups admin
422 422 config.add_route(
423 423 name='repo_groups',
424 424 pattern='/repo_groups')
425 425
426 426 config.add_route(
427 name='repo_groups_data',
428 pattern='/repo_groups_data')
429
430 config.add_route(
427 431 name='repo_group_new',
428 432 pattern='/repo_group/new')
429 433
430 434 config.add_route(
431 435 name='repo_group_create',
432 436 pattern='/repo_group/create')
433 437
434 438
435 439 def includeme(config):
436 440 from rhodecode.apps._base.navigation import includeme as nav_includeme
437 441
438 442 # Create admin navigation registry and add it to the pyramid registry.
439 443 nav_includeme(config)
440 444
441 445 # main admin routes
442 446 config.add_route(name='admin_home', pattern=ADMIN_PREFIX)
443 447 config.include(admin_routes, route_prefix=ADMIN_PREFIX)
444 448
445 449 # Scan module for configuration decorators.
446 450 config.scan('.views', ignore='.tests')
@@ -1,176 +1,194 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import pytest
23 23
24 24 from rhodecode.apps._base import ADMIN_PREFIX
25 25 from rhodecode.lib import helpers as h
26 from rhodecode.model.db import Repository, UserRepoToPerm, User
26 from rhodecode.model.db import Repository, UserRepoToPerm, User, RepoGroup
27 27 from rhodecode.model.meta import Session
28 28 from rhodecode.model.repo_group import RepoGroupModel
29 29 from rhodecode.tests import (
30 assert_session_flash, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH, TestController)
30 assert_session_flash, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH)
31 31 from rhodecode.tests.fixture import Fixture
32 32
33 33 fixture = Fixture()
34 34
35 35
36 36 def route_path(name, params=None, **kwargs):
37 37 import urllib
38 38
39 39 base_url = {
40 40 'repo_groups': ADMIN_PREFIX + '/repo_groups',
41 'repo_groups_data': ADMIN_PREFIX + '/repo_groups_data',
41 42 'repo_group_new': ADMIN_PREFIX + '/repo_group/new',
42 43 'repo_group_create': ADMIN_PREFIX + '/repo_group/create',
43 44
44 45 }[name].format(**kwargs)
45 46
46 47 if params:
47 48 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
48 49 return base_url
49 50
50 51
51 52 def _get_permission_for_user(user, repo):
52 53 perm = UserRepoToPerm.query()\
53 54 .filter(UserRepoToPerm.repository ==
54 55 Repository.get_by_repo_name(repo))\
55 56 .filter(UserRepoToPerm.user == User.get_by_username(user))\
56 57 .all()
57 58 return perm
58 59
59 60
60 61 @pytest.mark.usefixtures("app")
61 62 class TestAdminRepositoryGroups(object):
63
62 64 def test_show_repo_groups(self, autologin_user):
63 response = self.app.get(route_path('repo_groups'))
64 response.mustcontain('data: []')
65 self.app.get(route_path('repo_groups'))
66
67 def test_show_repo_groups_data(self, autologin_user, xhr_header):
68 response = self.app.get(route_path(
69 'repo_groups_data'), extra_environ=xhr_header)
70
71 all_repo_groups = RepoGroup.query().count()
72 assert response.json['recordsTotal'] == all_repo_groups
65 73
66 def test_show_repo_groups_after_creating_group(self, autologin_user):
74 def test_show_repo_groups_data_filtered(self, autologin_user, xhr_header):
75 response = self.app.get(route_path(
76 'repo_groups_data', params={'search[value]': 'empty_search'}),
77 extra_environ=xhr_header)
78
79 all_repo_groups = RepoGroup.query().count()
80 assert response.json['recordsTotal'] == all_repo_groups
81 assert response.json['recordsFiltered'] == 0
82
83 def test_show_repo_groups_after_creating_group(self, autologin_user, xhr_header):
67 84 fixture.create_repo_group('test_repo_group')
68 response = self.app.get(route_path('repo_groups'))
85 response = self.app.get(route_path(
86 'repo_groups_data'), extra_environ=xhr_header)
69 87 response.mustcontain('"name_raw": "test_repo_group"')
70 88 fixture.destroy_repo_group('test_repo_group')
71 89
72 90 def test_new(self, autologin_user):
73 91 self.app.get(route_path('repo_group_new'))
74 92
75 93 def test_new_with_parent_group(self, autologin_user, user_util):
76 94 gr = user_util.create_repo_group()
77 95
78 96 self.app.get(route_path('repo_group_new'),
79 97 params=dict(parent_group=gr.group_name))
80 98
81 99 def test_new_by_regular_user_no_permission(self, autologin_regular_user):
82 100 self.app.get(route_path('repo_group_new'), status=403)
83 101
84 102 @pytest.mark.parametrize('repo_group_name', [
85 103 'git_repo',
86 104 'git_repo_ąć',
87 105 'hg_repo',
88 106 '12345',
89 107 'hg_repo_ąć',
90 108 ])
91 109 def test_create(self, autologin_user, repo_group_name, csrf_token):
92 110 repo_group_name_unicode = repo_group_name.decode('utf8')
93 111 description = 'description for newly created repo group'
94 112
95 113 response = self.app.post(
96 114 route_path('repo_group_create'),
97 115 fixture._get_group_create_params(
98 116 group_name=repo_group_name,
99 117 group_description=description,
100 118 csrf_token=csrf_token))
101 119
102 120 # run the check page that triggers the flash message
103 121 repo_gr_url = h.route_path(
104 122 'repo_group_home', repo_group_name=repo_group_name)
105 123
106 124 assert_session_flash(
107 125 response,
108 126 'Created repository group <a href="%s">%s</a>' % (
109 127 repo_gr_url, repo_group_name_unicode))
110 128
111 129 # # test if the repo group was created in the database
112 130 new_repo_group = RepoGroupModel()._get_repo_group(
113 131 repo_group_name_unicode)
114 132 assert new_repo_group is not None
115 133
116 134 assert new_repo_group.group_name == repo_group_name_unicode
117 135 assert new_repo_group.group_description == description
118 136
119 137 # test if the repository is visible in the list ?
120 138 response = self.app.get(repo_gr_url)
121 139 response.mustcontain(repo_group_name)
122 140
123 141 # test if the repository group was created on filesystem
124 142 is_on_filesystem = os.path.isdir(
125 143 os.path.join(TESTS_TMP_PATH, repo_group_name))
126 144 if not is_on_filesystem:
127 145 self.fail('no repo group %s in filesystem' % repo_group_name)
128 146
129 147 RepoGroupModel().delete(repo_group_name_unicode)
130 148 Session().commit()
131 149
132 150 @pytest.mark.parametrize('repo_group_name', [
133 151 'git_repo',
134 152 'git_repo_ąć',
135 153 'hg_repo',
136 154 '12345',
137 155 'hg_repo_ąć',
138 156 ])
139 157 def test_create_subgroup(self, autologin_user, user_util, repo_group_name, csrf_token):
140 158 parent_group = user_util.create_repo_group()
141 159 parent_group_name = parent_group.group_name
142 160
143 161 expected_group_name = '{}/{}'.format(
144 162 parent_group_name, repo_group_name)
145 163 expected_group_name_unicode = expected_group_name.decode('utf8')
146 164
147 165 try:
148 166 response = self.app.post(
149 167 route_path('repo_group_create'),
150 168 fixture._get_group_create_params(
151 169 group_name=repo_group_name,
152 170 group_parent_id=parent_group.group_id,
153 171 group_description='Test desciption',
154 172 csrf_token=csrf_token))
155 173
156 174 assert_session_flash(
157 175 response,
158 176 u'Created repository group <a href="%s">%s</a>' % (
159 177 h.route_path('repo_group_home',
160 178 repo_group_name=expected_group_name),
161 179 expected_group_name_unicode))
162 180 finally:
163 181 RepoGroupModel().delete(expected_group_name_unicode)
164 182 Session().commit()
165 183
166 184 def test_user_with_creation_permissions_cannot_create_subgroups(
167 185 self, autologin_regular_user, user_util):
168 186
169 187 user_util.grant_user_permission(
170 188 TEST_USER_REGULAR_LOGIN, 'hg.repogroup.create.true')
171 189 parent_group = user_util.create_repo_group()
172 190 parent_group_id = parent_group.group_id
173 191 self.app.get(
174 192 route_path('repo_group_new',
175 193 params=dict(parent_group=parent_group_id), ),
176 194 status=403)
@@ -1,215 +1,361 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20 import datetime
21 21 import logging
22 22 import formencode
23 23 import formencode.htmlfill
24 24
25 25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden
26 26 from pyramid.view import view_config
27 27 from pyramid.renderers import render
28 28 from pyramid.response import Response
29 29
30 30 from rhodecode import events
31 31 from rhodecode.apps._base import BaseAppView, DataGridAppView
32 32
33 from rhodecode.lib.ext_json import json
34 33 from rhodecode.lib.auth import (
35 34 LoginRequired, CSRFRequired, NotAnonymous,
36 35 HasPermissionAny, HasRepoGroupPermissionAny)
37 36 from rhodecode.lib import helpers as h, audit_logger
38 from rhodecode.lib.utils2 import safe_int, safe_unicode
37 from rhodecode.lib.utils2 import safe_int, safe_unicode, datetime_to_time
39 38 from rhodecode.model.forms import RepoGroupForm
40 39 from rhodecode.model.repo_group import RepoGroupModel
41 40 from rhodecode.model.scm import RepoGroupList
42 from rhodecode.model.db import Session, RepoGroup
41 from rhodecode.model.db import (
42 or_, count, func, in_filter_generator, Session, RepoGroup, User, Repository)
43 43
44 44 log = logging.getLogger(__name__)
45 45
46 46
47 47 class AdminRepoGroupsView(BaseAppView, DataGridAppView):
48 48
49 49 def load_default_context(self):
50 50 c = self._get_local_tmpl_context()
51 51
52 52 return c
53 53
54 54 def _load_form_data(self, c):
55 55 allow_empty_group = False
56 56
57 57 if self._can_create_repo_group():
58 58 # we're global admin, we're ok and we can create TOP level groups
59 59 allow_empty_group = True
60 60
61 61 # override the choices for this form, we need to filter choices
62 62 # and display only those we have ADMIN right
63 63 groups_with_admin_rights = RepoGroupList(
64 64 RepoGroup.query().all(),
65 65 perm_set=['group.admin'])
66 66 c.repo_groups = RepoGroup.groups_choices(
67 67 groups=groups_with_admin_rights,
68 68 show_empty_group=allow_empty_group)
69 69
70 70 def _can_create_repo_group(self, parent_group_id=None):
71 71 is_admin = HasPermissionAny('hg.admin')('group create controller')
72 72 create_repo_group = HasPermissionAny(
73 73 'hg.repogroup.create.true')('group create controller')
74 74 if is_admin or (create_repo_group and not parent_group_id):
75 75 # we're global admin, or we have global repo group create
76 76 # permission
77 77 # we're ok and we can create TOP level groups
78 78 return True
79 79 elif parent_group_id:
80 80 # we check the permission if we can write to parent group
81 81 group = RepoGroup.get(parent_group_id)
82 82 group_name = group.group_name if group else None
83 83 if HasRepoGroupPermissionAny('group.admin')(
84 84 group_name, 'check if user is an admin of group'):
85 85 # we're an admin of passed in group, we're ok.
86 86 return True
87 87 else:
88 88 return False
89 89 return False
90 90
91 # permission check in data loading of
92 # `repo_group_list_data` via RepoGroupList
91 93 @LoginRequired()
92 94 @NotAnonymous()
93 # perms check inside
94 95 @view_config(
95 96 route_name='repo_groups', request_method='GET',
96 97 renderer='rhodecode:templates/admin/repo_groups/repo_groups.mako')
97 98 def repo_group_list(self):
98 99 c = self.load_default_context()
100 return self._get_template_context(c)
99 101
100 repo_group_list = RepoGroup.get_all_repo_groups()
101 repo_group_list_acl = RepoGroupList(
102 repo_group_list, perm_set=['group.admin'])
103 repo_group_data = RepoGroupModel().get_repo_groups_as_dict(
104 repo_group_list=repo_group_list_acl, admin=True)
105 c.data = json.dumps(repo_group_data)
106 return self._get_template_context(c)
102 # permission check inside
103 @LoginRequired()
104 @NotAnonymous()
105 @view_config(
106 route_name='repo_groups_data', request_method='GET',
107 renderer='json_ext', xhr=True)
108 def repo_group_list_data(self):
109 self.load_default_context()
110 column_map = {
111 'name_raw': 'group_name_hash',
112 'desc': 'group_description',
113 'last_change_raw': 'updated_on',
114 'top_level_repos': 'repos_total',
115 'owner': 'user_username',
116 }
117 draw, start, limit = self._extract_chunk(self.request)
118 search_q, order_by, order_dir = self._extract_ordering(
119 self.request, column_map=column_map)
120
121 _render = self.request.get_partial_renderer(
122 'rhodecode:templates/data_table/_dt_elements.mako')
123 c = _render.get_call_context()
124
125 def quick_menu(repo_group_name):
126 return _render('quick_repo_group_menu', repo_group_name)
127
128 def repo_group_lnk(repo_group_name):
129 return _render('repo_group_name', repo_group_name)
130
131 def last_change(last_change):
132 if isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
133 delta = datetime.timedelta(
134 seconds=(datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
135 last_change = last_change + delta
136 return _render("last_change", last_change)
137
138 def desc(desc, personal):
139 return _render(
140 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
141
142 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
143 return _render(
144 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
145
146 def user_profile(username):
147 return _render('user_profile', username)
148
149 auth_repo_group_list = RepoGroupList(
150 RepoGroup.query().all(), perm_set=['group.admin'])
151
152 allowed_ids = [-1]
153 for repo_group in auth_repo_group_list:
154 allowed_ids.append(repo_group.group_id)
155
156 repo_groups_data_total_count = RepoGroup.query()\
157 .filter(or_(
158 # generate multiple IN to fix limitation problems
159 *in_filter_generator(RepoGroup.group_id, allowed_ids)
160 )) \
161 .count()
162
163 repo_groups_data_total_inactive_count = RepoGroup.query()\
164 .filter(RepoGroup.group_id.in_(allowed_ids))\
165 .count()
166
167 repo_count = count(Repository.repo_id)
168 base_q = Session.query(
169 RepoGroup.group_name,
170 RepoGroup.group_name_hash,
171 RepoGroup.group_description,
172 RepoGroup.group_id,
173 RepoGroup.personal,
174 RepoGroup.updated_on,
175 User,
176 repo_count.label('repos_count')
177 ) \
178 .filter(or_(
179 # generate multiple IN to fix limitation problems
180 *in_filter_generator(RepoGroup.group_id, allowed_ids)
181 )) \
182 .outerjoin(Repository) \
183 .join(User, User.user_id == RepoGroup.user_id) \
184 .group_by(RepoGroup, User)
185
186 if search_q:
187 like_expression = u'%{}%'.format(safe_unicode(search_q))
188 base_q = base_q.filter(or_(
189 RepoGroup.group_name.ilike(like_expression),
190 ))
191
192 repo_groups_data_total_filtered_count = base_q.count()
193 # the inactive isn't really used, but we still make it same as other data grids
194 # which use inactive (users,user groups)
195 repo_groups_data_total_filtered_inactive_count = repo_groups_data_total_filtered_count
196
197 sort_defined = False
198 if order_by == 'group_name':
199 sort_col = func.lower(RepoGroup.group_name)
200 sort_defined = True
201 elif order_by == 'repos_total':
202 sort_col = repo_count
203 sort_defined = True
204 elif order_by == 'user_username':
205 sort_col = User.username
206 else:
207 sort_col = getattr(RepoGroup, order_by, None)
208
209 if sort_defined or sort_col:
210 if order_dir == 'asc':
211 sort_col = sort_col.asc()
212 else:
213 sort_col = sort_col.desc()
214
215 base_q = base_q.order_by(sort_col)
216 base_q = base_q.offset(start).limit(limit)
217
218 # authenticated access to user groups
219 auth_repo_group_list = base_q.all()
220
221 repo_groups_data = []
222 for repo_gr in auth_repo_group_list:
223 row = {
224 "menu": quick_menu(repo_gr.group_name),
225 "name": repo_group_lnk(repo_gr.group_name),
226 "name_raw": repo_gr.group_name,
227 "last_change": last_change(repo_gr.updated_on),
228 "last_change_raw": datetime_to_time(repo_gr.updated_on),
229
230 "last_changeset": "",
231 "last_changeset_raw": "",
232
233 "desc": desc(repo_gr.group_description, repo_gr.personal),
234 "owner": user_profile(repo_gr.User.username),
235 "top_level_repos": repo_gr.repos_count,
236 "action": repo_group_actions(
237 repo_gr.group_id, repo_gr.group_name, repo_gr.repos_count),
238
239 }
240
241 repo_groups_data.append(row)
242
243 data = ({
244 'draw': draw,
245 'data': repo_groups_data,
246 'recordsTotal': repo_groups_data_total_count,
247 'recordsTotalInactive': repo_groups_data_total_inactive_count,
248 'recordsFiltered': repo_groups_data_total_filtered_count,
249 'recordsFilteredInactive': repo_groups_data_total_filtered_inactive_count,
250 })
251
252 return data
107 253
108 254 @LoginRequired()
109 255 @NotAnonymous()
110 256 # perm checks inside
111 257 @view_config(
112 258 route_name='repo_group_new', request_method='GET',
113 259 renderer='rhodecode:templates/admin/repo_groups/repo_group_add.mako')
114 260 def repo_group_new(self):
115 261 c = self.load_default_context()
116 262
117 263 # perm check for admin, create_group perm or admin of parent_group
118 264 parent_group_id = safe_int(self.request.GET.get('parent_group'))
119 265 if not self._can_create_repo_group(parent_group_id):
120 266 raise HTTPForbidden()
121 267
122 268 self._load_form_data(c)
123 269
124 270 defaults = {} # Future proof for default of repo group
125 271 data = render(
126 272 'rhodecode:templates/admin/repo_groups/repo_group_add.mako',
127 273 self._get_template_context(c), self.request)
128 274 html = formencode.htmlfill.render(
129 275 data,
130 276 defaults=defaults,
131 277 encoding="UTF-8",
132 278 force_defaults=False
133 279 )
134 280 return Response(html)
135 281
136 282 @LoginRequired()
137 283 @NotAnonymous()
138 284 @CSRFRequired()
139 285 # perm checks inside
140 286 @view_config(
141 287 route_name='repo_group_create', request_method='POST',
142 288 renderer='rhodecode:templates/admin/repo_groups/repo_group_add.mako')
143 289 def repo_group_create(self):
144 290 c = self.load_default_context()
145 291 _ = self.request.translate
146 292
147 293 parent_group_id = safe_int(self.request.POST.get('group_parent_id'))
148 294 can_create = self._can_create_repo_group(parent_group_id)
149 295
150 296 self._load_form_data(c)
151 297 # permissions for can create group based on parent_id are checked
152 298 # here in the Form
153 299 available_groups = map(lambda k: safe_unicode(k[0]), c.repo_groups)
154 300 repo_group_form = RepoGroupForm(
155 301 self.request.translate, available_groups=available_groups,
156 302 can_create_in_root=can_create)()
157 303
158 304 repo_group_name = self.request.POST.get('group_name')
159 305 try:
160 306 owner = self._rhodecode_user
161 307 form_result = repo_group_form.to_python(dict(self.request.POST))
162 308 copy_permissions = form_result.get('group_copy_permissions')
163 309 repo_group = RepoGroupModel().create(
164 310 group_name=form_result['group_name_full'],
165 311 group_description=form_result['group_description'],
166 312 owner=owner.user_id,
167 313 copy_permissions=form_result['group_copy_permissions']
168 314 )
169 315 Session().flush()
170 316
171 317 repo_group_data = repo_group.get_api_data()
172 318 audit_logger.store_web(
173 319 'repo_group.create', action_data={'data': repo_group_data},
174 320 user=self._rhodecode_user)
175 321
176 322 Session().commit()
177 323
178 324 _new_group_name = form_result['group_name_full']
179 325
180 326 repo_group_url = h.link_to(
181 327 _new_group_name,
182 328 h.route_path('repo_group_home', repo_group_name=_new_group_name))
183 329 h.flash(h.literal(_('Created repository group %s')
184 330 % repo_group_url), category='success')
185 331
186 332 except formencode.Invalid as errors:
187 333 data = render(
188 334 'rhodecode:templates/admin/repo_groups/repo_group_add.mako',
189 335 self._get_template_context(c), self.request)
190 336 html = formencode.htmlfill.render(
191 337 data,
192 338 defaults=errors.value,
193 339 errors=errors.error_dict or {},
194 340 prefix_error=False,
195 341 encoding="UTF-8",
196 342 force_defaults=False
197 343 )
198 344 return Response(html)
199 345 except Exception:
200 346 log.exception("Exception during creation of repository group")
201 347 h.flash(_('Error occurred during creation of repository group %s')
202 348 % repo_group_name, category='error')
203 349 raise HTTPFound(h.route_path('home'))
204 350
205 351 affected_user_ids = [self._rhodecode_user.user_id]
206 352 if copy_permissions:
207 353 user_group_perms = repo_group.permissions(expand_from_user_groups=True)
208 354 copy_perms = [perm['user_id'] for perm in user_group_perms]
209 355 # also include those newly created by copy
210 356 affected_user_ids.extend(copy_perms)
211 357 events.trigger(events.UserPermissionsChange(affected_user_ids))
212 358
213 359 raise HTTPFound(
214 360 h.route_path('repo_group_home',
215 361 repo_group_name=form_result['group_name_full']))
@@ -1,259 +1,271 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 import formencode
24 24 import formencode.htmlfill
25 25
26 26 from pyramid.httpexceptions import HTTPFound
27 27 from pyramid.view import view_config
28 28 from pyramid.response import Response
29 29 from pyramid.renderers import render
30 30
31 31 from rhodecode import events
32 32 from rhodecode.apps._base import BaseAppView, DataGridAppView
33 33 from rhodecode.lib.auth import (
34 34 LoginRequired, NotAnonymous, CSRFRequired, HasPermissionAnyDecorator)
35 35 from rhodecode.lib import helpers as h, audit_logger
36 36 from rhodecode.lib.utils2 import safe_unicode
37 37
38 38 from rhodecode.model.forms import UserGroupForm
39 39 from rhodecode.model.permission import PermissionModel
40 40 from rhodecode.model.scm import UserGroupList
41 41 from rhodecode.model.db import (
42 or_, count, User, UserGroup, UserGroupMember)
42 or_, count, User, UserGroup, UserGroupMember, in_filter_generator)
43 43 from rhodecode.model.meta import Session
44 44 from rhodecode.model.user_group import UserGroupModel
45 45 from rhodecode.model.db import true
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 class AdminUserGroupsView(BaseAppView, DataGridAppView):
51 51
52 52 def load_default_context(self):
53 53 c = self._get_local_tmpl_context()
54 54
55 55 PermissionModel().set_global_permission_choices(
56 56 c, gettext_translator=self.request.translate)
57 57
58 58 return c
59 59
60 60 # permission check in data loading of
61 61 # `user_groups_list_data` via UserGroupList
62 62 @LoginRequired()
63 63 @NotAnonymous()
64 64 @view_config(
65 65 route_name='user_groups', request_method='GET',
66 66 renderer='rhodecode:templates/admin/user_groups/user_groups.mako')
67 67 def user_groups_list(self):
68 68 c = self.load_default_context()
69 69 return self._get_template_context(c)
70 70
71 71 # permission check inside
72 72 @LoginRequired()
73 73 @NotAnonymous()
74 74 @view_config(
75 75 route_name='user_groups_data', request_method='GET',
76 76 renderer='json_ext', xhr=True)
77 77 def user_groups_list_data(self):
78 78 self.load_default_context()
79 79 column_map = {
80 80 'active': 'users_group_active',
81 81 'description': 'user_group_description',
82 82 'members': 'members_total',
83 83 'owner': 'user_username',
84 84 'sync': 'group_data'
85 85 }
86 86 draw, start, limit = self._extract_chunk(self.request)
87 87 search_q, order_by, order_dir = self._extract_ordering(
88 88 self.request, column_map=column_map)
89 89
90 90 _render = self.request.get_partial_renderer(
91 91 'rhodecode:templates/data_table/_dt_elements.mako')
92 92
93 93 def user_group_name(user_group_name):
94 94 return _render("user_group_name", user_group_name)
95 95
96 96 def user_group_actions(user_group_id, user_group_name):
97 97 return _render("user_group_actions", user_group_id, user_group_name)
98 98
99 99 def user_profile(username):
100 100 return _render('user_profile', username)
101 101
102 102 auth_user_group_list = UserGroupList(
103 103 UserGroup.query().all(), perm_set=['usergroup.admin'])
104 104
105 105 allowed_ids = [-1]
106 106 for user_group in auth_user_group_list:
107 107 allowed_ids.append(user_group.users_group_id)
108 108
109 109 user_groups_data_total_count = UserGroup.query()\
110 .filter(UserGroup.users_group_id.in_(allowed_ids))\
110 .filter(or_(
111 # generate multiple IN to fix limitation problems
112 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
113 ))\
111 114 .count()
112 115
113 116 user_groups_data_total_inactive_count = UserGroup.query()\
114 .filter(UserGroup.users_group_id.in_(allowed_ids))\
117 .filter(or_(
118 # generate multiple IN to fix limitation problems
119 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
120 ))\
115 121 .filter(UserGroup.users_group_active != true()).count()
116 122
117 123 member_count = count(UserGroupMember.user_id)
118 124 base_q = Session.query(
119 125 UserGroup.users_group_name,
120 126 UserGroup.user_group_description,
121 127 UserGroup.users_group_active,
122 128 UserGroup.users_group_id,
123 129 UserGroup.group_data,
124 130 User,
125 131 member_count.label('member_count')
126 132 ) \
127 .filter(UserGroup.users_group_id.in_(allowed_ids)) \
133 .filter(or_(
134 # generate multiple IN to fix limitation problems
135 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
136 )) \
128 137 .outerjoin(UserGroupMember) \
129 138 .join(User, User.user_id == UserGroup.user_id) \
130 139 .group_by(UserGroup, User)
131 140
132 141 base_q_inactive = base_q.filter(UserGroup.users_group_active != true())
133 142
134 143 if search_q:
135 144 like_expression = u'%{}%'.format(safe_unicode(search_q))
136 145 base_q = base_q.filter(or_(
137 146 UserGroup.users_group_name.ilike(like_expression),
138 147 ))
139 148 base_q_inactive = base_q.filter(UserGroup.users_group_active != true())
140 149
141 150 user_groups_data_total_filtered_count = base_q.count()
142 151 user_groups_data_total_filtered_inactive_count = base_q_inactive.count()
143 152
153 sort_defined = False
144 154 if order_by == 'members_total':
145 155 sort_col = member_count
156 sort_defined = True
146 157 elif order_by == 'user_username':
147 158 sort_col = User.username
148 159 else:
149 160 sort_col = getattr(UserGroup, order_by, None)
150 161
151 if isinstance(sort_col, count) or sort_col:
162 if sort_defined or sort_col:
152 163 if order_dir == 'asc':
153 164 sort_col = sort_col.asc()
154 165 else:
155 166 sort_col = sort_col.desc()
156 167
157 168 base_q = base_q.order_by(sort_col)
158 169 base_q = base_q.offset(start).limit(limit)
159 170
160 171 # authenticated access to user groups
161 172 auth_user_group_list = base_q.all()
162 173
163 174 user_groups_data = []
164 175 for user_gr in auth_user_group_list:
165 user_groups_data.append({
176 row = {
166 177 "users_group_name": user_group_name(user_gr.users_group_name),
167 178 "name_raw": h.escape(user_gr.users_group_name),
168 179 "description": h.escape(user_gr.user_group_description),
169 180 "members": user_gr.member_count,
170 181 # NOTE(marcink): because of advanced query we
171 182 # need to load it like that
172 183 "sync": UserGroup._load_sync(
173 184 UserGroup._load_group_data(user_gr.group_data)),
174 185 "active": h.bool2icon(user_gr.users_group_active),
175 186 "owner": user_profile(user_gr.User.username),
176 187 "action": user_group_actions(
177 188 user_gr.users_group_id, user_gr.users_group_name)
178 })
189 }
190 user_groups_data.append(row)
179 191
180 192 data = ({
181 193 'draw': draw,
182 194 'data': user_groups_data,
183 195 'recordsTotal': user_groups_data_total_count,
184 196 'recordsTotalInactive': user_groups_data_total_inactive_count,
185 197 'recordsFiltered': user_groups_data_total_filtered_count,
186 198 'recordsFilteredInactive': user_groups_data_total_filtered_inactive_count,
187 199 })
188 200
189 201 return data
190 202
191 203 @LoginRequired()
192 204 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
193 205 @view_config(
194 206 route_name='user_groups_new', request_method='GET',
195 207 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
196 208 def user_groups_new(self):
197 209 c = self.load_default_context()
198 210 return self._get_template_context(c)
199 211
200 212 @LoginRequired()
201 213 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
202 214 @CSRFRequired()
203 215 @view_config(
204 216 route_name='user_groups_create', request_method='POST',
205 217 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
206 218 def user_groups_create(self):
207 219 _ = self.request.translate
208 220 c = self.load_default_context()
209 221 users_group_form = UserGroupForm(self.request.translate)()
210 222
211 223 user_group_name = self.request.POST.get('users_group_name')
212 224 try:
213 225 form_result = users_group_form.to_python(dict(self.request.POST))
214 226 user_group = UserGroupModel().create(
215 227 name=form_result['users_group_name'],
216 228 description=form_result['user_group_description'],
217 229 owner=self._rhodecode_user.user_id,
218 230 active=form_result['users_group_active'])
219 231 Session().flush()
220 232 creation_data = user_group.get_api_data()
221 233 user_group_name = form_result['users_group_name']
222 234
223 235 audit_logger.store_web(
224 236 'user_group.create', action_data={'data': creation_data},
225 237 user=self._rhodecode_user)
226 238
227 239 user_group_link = h.link_to(
228 240 h.escape(user_group_name),
229 241 h.route_path(
230 242 'edit_user_group', user_group_id=user_group.users_group_id))
231 243 h.flash(h.literal(_('Created user group %(user_group_link)s')
232 244 % {'user_group_link': user_group_link}),
233 245 category='success')
234 246 Session().commit()
235 247 user_group_id = user_group.users_group_id
236 248 except formencode.Invalid as errors:
237 249
238 250 data = render(
239 251 'rhodecode:templates/admin/user_groups/user_group_add.mako',
240 252 self._get_template_context(c), self.request)
241 253 html = formencode.htmlfill.render(
242 254 data,
243 255 defaults=errors.value,
244 256 errors=errors.error_dict or {},
245 257 prefix_error=False,
246 258 encoding="UTF-8",
247 259 force_defaults=False
248 260 )
249 261 return Response(html)
250 262
251 263 except Exception:
252 264 log.exception("Exception creating user group")
253 265 h.flash(_('Error occurred during creation of user group %s') \
254 266 % user_group_name, category='error')
255 267 raise HTTPFound(h.route_path('user_groups_new'))
256 268
257 269 events.trigger(events.UserPermissionsChange([self._rhodecode_user.user_id]))
258 270 raise HTTPFound(
259 271 h.route_path('edit_user_group', user_group_id=user_group_id))
@@ -1,316 +1,310 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 from pyramid.view import view_config
24 24 from pyramid.httpexceptions import HTTPFound
25 25
26 26 from rhodecode import events
27 27 from rhodecode.apps._base import RepoAppView
28 28 from rhodecode.lib import helpers as h
29 29 from rhodecode.lib import audit_logger
30 30 from rhodecode.lib.auth import (
31 31 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired,
32 32 HasRepoPermissionAny)
33 33 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
34 34 from rhodecode.lib.utils2 import safe_int
35 35 from rhodecode.lib.vcs import RepositoryError
36 36 from rhodecode.model.db import Session, UserFollowing, User, Repository
37 37 from rhodecode.model.repo import RepoModel
38 38 from rhodecode.model.scm import ScmModel
39 39
40 40 log = logging.getLogger(__name__)
41 41
42 42
43 43 class RepoSettingsView(RepoAppView):
44 44
45 45 def load_default_context(self):
46 46 c = self._get_local_tmpl_context()
47 47 return c
48 48
49 49 def _get_users_with_permissions(self):
50 50 user_permissions = {}
51 51 for perm in self.db_repo.permissions():
52 52 user_permissions[perm.user_id] = perm
53 53
54 54 return user_permissions
55 55
56 56 @LoginRequired()
57 57 @HasRepoPermissionAnyDecorator('repository.admin')
58 58 @view_config(
59 59 route_name='edit_repo_advanced', request_method='GET',
60 60 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
61 61 def edit_advanced(self):
62 62 c = self.load_default_context()
63 63 c.active = 'advanced'
64 64
65 65 c.default_user_id = User.get_default_user().user_id
66 66 c.in_public_journal = UserFollowing.query() \
67 67 .filter(UserFollowing.user_id == c.default_user_id) \
68 68 .filter(UserFollowing.follows_repository == self.db_repo).scalar()
69 69
70 c.has_origin_repo_read_perm = False
71 if self.db_repo.fork:
72 c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
73 'repository.write', 'repository.read', 'repository.admin')(
74 self.db_repo.fork.repo_name, 'repo set as fork page')
75
76 70 c.ver_info_dict = self.rhodecode_vcs_repo.get_hooks_info()
77 71
78 72 return self._get_template_context(c)
79 73
80 74 @LoginRequired()
81 75 @HasRepoPermissionAnyDecorator('repository.admin')
82 76 @CSRFRequired()
83 77 @view_config(
84 78 route_name='edit_repo_advanced_archive', request_method='POST',
85 79 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
86 80 def edit_advanced_archive(self):
87 81 """
88 82 Archives the repository. It will become read-only, and not visible in search
89 83 or other queries. But still visible for super-admins.
90 84 """
91 85
92 86 _ = self.request.translate
93 87
94 88 try:
95 89 old_data = self.db_repo.get_api_data()
96 90 RepoModel().archive(self.db_repo)
97 91
98 92 repo = audit_logger.RepoWrap(repo_id=None, repo_name=self.db_repo.repo_name)
99 93 audit_logger.store_web(
100 94 'repo.archive', action_data={'old_data': old_data},
101 95 user=self._rhodecode_user, repo=repo)
102 96
103 97 ScmModel().mark_for_invalidation(self.db_repo_name, delete=True)
104 98 h.flash(
105 99 _('Archived repository `%s`') % self.db_repo_name,
106 100 category='success')
107 101 Session().commit()
108 102 except Exception:
109 103 log.exception("Exception during archiving of repository")
110 104 h.flash(_('An error occurred during archiving of `%s`')
111 105 % self.db_repo_name, category='error')
112 106 # redirect to advanced for more deletion options
113 107 raise HTTPFound(
114 108 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name,
115 109 _anchor='advanced-archive'))
116 110
117 111 # flush permissions for all users defined in permissions
118 112 affected_user_ids = self._get_users_with_permissions().keys()
119 113 events.trigger(events.UserPermissionsChange(affected_user_ids))
120 114
121 115 raise HTTPFound(h.route_path('home'))
122 116
123 117 @LoginRequired()
124 118 @HasRepoPermissionAnyDecorator('repository.admin')
125 119 @CSRFRequired()
126 120 @view_config(
127 121 route_name='edit_repo_advanced_delete', request_method='POST',
128 122 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
129 123 def edit_advanced_delete(self):
130 124 """
131 125 Deletes the repository, or shows warnings if deletion is not possible
132 126 because of attached forks or other errors.
133 127 """
134 128 _ = self.request.translate
135 129 handle_forks = self.request.POST.get('forks', None)
136 130 if handle_forks == 'detach_forks':
137 131 handle_forks = 'detach'
138 132 elif handle_forks == 'delete_forks':
139 133 handle_forks = 'delete'
140 134
141 135 try:
142 136 old_data = self.db_repo.get_api_data()
143 137 RepoModel().delete(self.db_repo, forks=handle_forks)
144 138
145 139 _forks = self.db_repo.forks.count()
146 140 if _forks and handle_forks:
147 141 if handle_forks == 'detach_forks':
148 142 h.flash(_('Detached %s forks') % _forks, category='success')
149 143 elif handle_forks == 'delete_forks':
150 144 h.flash(_('Deleted %s forks') % _forks, category='success')
151 145
152 146 repo = audit_logger.RepoWrap(repo_id=None, repo_name=self.db_repo.repo_name)
153 147 audit_logger.store_web(
154 148 'repo.delete', action_data={'old_data': old_data},
155 149 user=self._rhodecode_user, repo=repo)
156 150
157 151 ScmModel().mark_for_invalidation(self.db_repo_name, delete=True)
158 152 h.flash(
159 153 _('Deleted repository `%s`') % self.db_repo_name,
160 154 category='success')
161 155 Session().commit()
162 156 except AttachedForksError:
163 157 repo_advanced_url = h.route_path(
164 158 'edit_repo_advanced', repo_name=self.db_repo_name,
165 159 _anchor='advanced-delete')
166 160 delete_anchor = h.link_to(_('detach or delete'), repo_advanced_url)
167 161 h.flash(_('Cannot delete `{repo}` it still contains attached forks. '
168 162 'Try using {delete_or_detach} option.')
169 163 .format(repo=self.db_repo_name, delete_or_detach=delete_anchor),
170 164 category='warning')
171 165
172 166 # redirect to advanced for forks handle action ?
173 167 raise HTTPFound(repo_advanced_url)
174 168
175 169 except AttachedPullRequestsError:
176 170 repo_advanced_url = h.route_path(
177 171 'edit_repo_advanced', repo_name=self.db_repo_name,
178 172 _anchor='advanced-delete')
179 173 attached_prs = len(self.db_repo.pull_requests_source +
180 174 self.db_repo.pull_requests_target)
181 175 h.flash(
182 176 _('Cannot delete `{repo}` it still contains {num} attached pull requests. '
183 177 'Consider archiving the repository instead.').format(
184 178 repo=self.db_repo_name, num=attached_prs), category='warning')
185 179
186 180 # redirect to advanced for forks handle action ?
187 181 raise HTTPFound(repo_advanced_url)
188 182
189 183 except Exception:
190 184 log.exception("Exception during deletion of repository")
191 185 h.flash(_('An error occurred during deletion of `%s`')
192 186 % self.db_repo_name, category='error')
193 187 # redirect to advanced for more deletion options
194 188 raise HTTPFound(
195 189 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name,
196 190 _anchor='advanced-delete'))
197 191
198 192 raise HTTPFound(h.route_path('home'))
199 193
200 194 @LoginRequired()
201 195 @HasRepoPermissionAnyDecorator('repository.admin')
202 196 @CSRFRequired()
203 197 @view_config(
204 198 route_name='edit_repo_advanced_journal', request_method='POST',
205 199 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
206 200 def edit_advanced_journal(self):
207 201 """
208 202 Set's this repository to be visible in public journal,
209 203 in other words making default user to follow this repo
210 204 """
211 205 _ = self.request.translate
212 206
213 207 try:
214 208 user_id = User.get_default_user().user_id
215 209 ScmModel().toggle_following_repo(self.db_repo.repo_id, user_id)
216 210 h.flash(_('Updated repository visibility in public journal'),
217 211 category='success')
218 212 Session().commit()
219 213 except Exception:
220 214 h.flash(_('An error occurred during setting this '
221 215 'repository in public journal'),
222 216 category='error')
223 217
224 218 raise HTTPFound(
225 219 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
226 220
227 221 @LoginRequired()
228 222 @HasRepoPermissionAnyDecorator('repository.admin')
229 223 @CSRFRequired()
230 224 @view_config(
231 225 route_name='edit_repo_advanced_fork', request_method='POST',
232 226 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
233 227 def edit_advanced_fork(self):
234 228 """
235 229 Mark given repository as a fork of another
236 230 """
237 231 _ = self.request.translate
238 232
239 233 new_fork_id = safe_int(self.request.POST.get('id_fork_of'))
240 234
241 235 # valid repo, re-check permissions
242 236 if new_fork_id:
243 237 repo = Repository.get(new_fork_id)
244 238 # ensure we have at least read access to the repo we mark
245 239 perm_check = HasRepoPermissionAny(
246 240 'repository.read', 'repository.write', 'repository.admin')
247 241
248 242 if repo and perm_check(repo_name=repo.repo_name):
249 243 new_fork_id = repo.repo_id
250 244 else:
251 245 new_fork_id = None
252 246
253 247 try:
254 248 repo = ScmModel().mark_as_fork(
255 249 self.db_repo_name, new_fork_id, self._rhodecode_user.user_id)
256 250 fork = repo.fork.repo_name if repo.fork else _('Nothing')
257 251 Session().commit()
258 252 h.flash(
259 253 _('Marked repo %s as fork of %s') % (self.db_repo_name, fork),
260 254 category='success')
261 255 except RepositoryError as e:
262 256 log.exception("Repository Error occurred")
263 257 h.flash(str(e), category='error')
264 258 except Exception:
265 259 log.exception("Exception while editing fork")
266 260 h.flash(_('An error occurred during this operation'),
267 261 category='error')
268 262
269 263 raise HTTPFound(
270 264 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
271 265
272 266 @LoginRequired()
273 267 @HasRepoPermissionAnyDecorator('repository.admin')
274 268 @CSRFRequired()
275 269 @view_config(
276 270 route_name='edit_repo_advanced_locking', request_method='POST',
277 271 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
278 272 def edit_advanced_locking(self):
279 273 """
280 274 Toggle locking of repository
281 275 """
282 276 _ = self.request.translate
283 277 set_lock = self.request.POST.get('set_lock')
284 278 set_unlock = self.request.POST.get('set_unlock')
285 279
286 280 try:
287 281 if set_lock:
288 282 Repository.lock(self.db_repo, self._rhodecode_user.user_id,
289 283 lock_reason=Repository.LOCK_WEB)
290 284 h.flash(_('Locked repository'), category='success')
291 285 elif set_unlock:
292 286 Repository.unlock(self.db_repo)
293 287 h.flash(_('Unlocked repository'), category='success')
294 288 except Exception as e:
295 289 log.exception("Exception during unlocking")
296 290 h.flash(_('An error occurred during unlocking'), category='error')
297 291
298 292 raise HTTPFound(
299 293 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
300 294
301 295 @LoginRequired()
302 296 @HasRepoPermissionAnyDecorator('repository.admin')
303 297 @view_config(
304 298 route_name='edit_repo_advanced_hooks', request_method='GET',
305 299 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
306 300 def edit_advanced_install_hooks(self):
307 301 """
308 302 Install Hooks for repository
309 303 """
310 304 _ = self.request.translate
311 305 self.load_default_context()
312 306 self.rhodecode_vcs_repo.install_hooks(force=True)
313 307 h.flash(_('installed updated hooks into this repository'),
314 308 category='success')
315 309 raise HTTPFound(
316 310 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
@@ -1,123 +1,147 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import sys
23 import shutil
24 23 import logging
25 24 import tempfile
26 25 import textwrap
27
26 import collections
28 27 from .base import VcsServer
28 from rhodecode.model.db import RhodeCodeUi
29 from rhodecode.model.settings import VcsSettingsModel
29 30
30 31 log = logging.getLogger(__name__)
31 32
32 33
33 34 class MercurialTunnelWrapper(object):
34 35 process = None
35 36
36 37 def __init__(self, server):
37 38 self.server = server
38 39 self.stdin = sys.stdin
39 40 self.stdout = sys.stdout
40 self.svn_conf_fd, self.svn_conf_path = tempfile.mkstemp()
41 self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp()
41 self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp(prefix='hgrc_rhodecode_')
42 42
43 43 def create_hooks_env(self):
44 repo_name = self.server.repo_name
45 hg_flags = self.config_to_hgrc(repo_name)
44 46
45 47 content = textwrap.dedent(
46 48 '''
47 # SSH hooks version=1.0.0
48 [hooks]
49 pretxnchangegroup.ssh_auth=python:vcsserver.hooks.pre_push_ssh_auth
50 pretxnchangegroup.ssh=python:vcsserver.hooks.pre_push_ssh
51 changegroup.ssh=python:vcsserver.hooks.post_push_ssh
49 # RhodeCode SSH hooks version=2.0.0
50 {custom}
51 '''
52 ).format(custom='\n'.join(hg_flags))
52 53
53 preoutgoing.ssh=python:vcsserver.hooks.pre_pull_ssh
54 outgoing.ssh=python:vcsserver.hooks.post_pull_ssh
54 root = self.server.get_root_store()
55 hgrc_custom = os.path.join(root, repo_name, '.hg', 'hgrc_rhodecode')
56 hgrc_main = os.path.join(root, repo_name, '.hg', 'hgrc')
55 57
56 '''
57 )
58 # cleanup custom hgrc file
59 if os.path.isfile(hgrc_custom):
60 with open(hgrc_custom, 'wb') as f:
61 f.write('')
62 log.debug('Cleanup custom hgrc file under %s', hgrc_custom)
58 63
64 # write temp
59 65 with os.fdopen(self.hooks_env_fd, 'w') as hooks_env_file:
60 66 hooks_env_file.write(content)
61 root = self.server.get_root_store()
62 67
63 hgrc_custom = os.path.join(
64 root, self.server.repo_name, '.hg', 'hgrc_rhodecode')
65 log.debug('Wrote custom hgrc file under %s', hgrc_custom)
66 shutil.move(
67 self.hooks_env_path, hgrc_custom)
68
69 hgrc_main = os.path.join(
70 root, self.server.repo_name, '.hg', 'hgrc')
71 include_marker = '%include hgrc_rhodecode'
68 return self.hooks_env_path
72 69
73 if not os.path.isfile(hgrc_main):
74 os.mknod(hgrc_main)
75
76 with open(hgrc_main, 'rb') as f:
77 data = f.read()
78 has_marker = include_marker in data
70 def remove_configs(self):
71 os.remove(self.hooks_env_path)
79 72
80 if not has_marker:
81 log.debug('Adding include marker for hooks')
82 with open(hgrc_main, 'wa') as f:
83 f.write(textwrap.dedent('''
84 # added by RhodeCode
85 {}
86 '''.format(include_marker)))
87
88 def command(self):
73 def command(self, hgrc_path):
89 74 root = self.server.get_root_store()
90 75
91 76 command = (
92 "cd {root}; {hg_path} -R {root}{repo_name} "
77 "cd {root}; HGRCPATH={hgrc} {hg_path} -R {root}{repo_name} "
93 78 "serve --stdio".format(
94 79 root=root, hg_path=self.server.hg_path,
95 repo_name=self.server.repo_name))
80 repo_name=self.server.repo_name, hgrc=hgrc_path))
96 81 log.debug("Final CMD: %s", command)
97 82 return command
98 83
99 84 def run(self, extras):
100 85 # at this point we cannot tell, we do further ACL checks
101 86 # inside the hooks
102 87 action = '?'
103 88 # permissions are check via `pre_push_ssh_auth` hook
104 89 self.server.update_environment(action=action, extras=extras)
105 self.create_hooks_env()
106 return os.system(self.command())
90 custom_hgrc_file = self.create_hooks_env()
91
92 try:
93 return os.system(self.command(custom_hgrc_file))
94 finally:
95 self.remove_configs()
107 96
108 97
109 98 class MercurialServer(VcsServer):
110 99 backend = 'hg'
100 cli_flags = ['phases', 'largefiles', 'extensions', 'experimental', 'hooks']
111 101
112 def __init__(self, store, ini_path, repo_name,
113 user, user_permissions, config, env):
114 super(MercurialServer, self).\
115 __init__(user, user_permissions, config, env)
102 def __init__(self, store, ini_path, repo_name, user, user_permissions, config, env):
103 super(MercurialServer, self).__init__(user, user_permissions, config, env)
116 104
117 105 self.store = store
118 106 self.ini_path = ini_path
119 107 self.repo_name = repo_name
120 self._path = self.hg_path = config.get(
121 'app:main', 'ssh.executable.hg')
108 self._path = self.hg_path = config.get('app:main', 'ssh.executable.hg')
109 self.tunnel = MercurialTunnelWrapper(server=self)
110
111 def config_to_hgrc(self, repo_name):
112 ui_sections = collections.defaultdict(list)
113 ui = VcsSettingsModel(repo=repo_name).get_ui_settings(section=None, key=None)
114
115 # write default hooks
116 default_hooks = [
117 ('pretxnchangegroup.ssh_auth', 'python:vcsserver.hooks.pre_push_ssh_auth'),
118 ('pretxnchangegroup.ssh', 'python:vcsserver.hooks.pre_push_ssh'),
119 ('changegroup.ssh', 'python:vcsserver.hooks.post_push_ssh'),
120
121 ('preoutgoing.ssh', 'python:vcsserver.hooks.pre_pull_ssh'),
122 ('outgoing.ssh', 'python:vcsserver.hooks.post_pull_ssh'),
123 ]
124
125 for k, v in default_hooks:
126 ui_sections['hooks'].append((k, v))
122 127
123 self.tunnel = MercurialTunnelWrapper(server=self)
128 for entry in ui:
129 if not entry.active:
130 continue
131 sec = entry.section
132 key = entry.key
133
134 if sec in self.cli_flags:
135 # we want only custom hooks, so we skip builtins
136 if sec == 'hooks' and key in RhodeCodeUi.HOOKS_BUILTIN:
137 continue
138
139 ui_sections[sec].append([key, entry.value])
140
141 flags = []
142 for _sec, key_val in ui_sections.items():
143 flags.append(' ')
144 flags.append('[{}]'.format(_sec))
145 for key, val in key_val:
146 flags.append('{}= {}'.format(key, val))
147 return flags
@@ -1,116 +1,119 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 import os
21 22 import mock
22 23 import pytest
23 24
24 25 from rhodecode.apps.ssh_support.lib.backends.hg import MercurialServer
25 26 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
26 27
27 28
28 29 class MercurialServerCreator(object):
29 30 root = '/tmp/repo/path/'
30 31 hg_path = '/usr/local/bin/hg'
31 32
32 33 config_data = {
33 34 'app:main': {
34 35 'ssh.executable.hg': hg_path,
35 36 'vcs.hooks.protocol': 'http',
36 37 }
37 38 }
38 39 repo_name = 'test_hg'
39 40 user = plain_dummy_user()
40 41
41 42 def __init__(self):
42 43 def config_get(part, key):
43 44 return self.config_data.get(part, {}).get(key)
44 45 self.config_mock = mock.Mock()
45 46 self.config_mock.get = mock.Mock(side_effect=config_get)
46 47
47 48 def create(self, **kwargs):
48 49 parameters = {
49 50 'store': self.root,
50 51 'ini_path': '',
51 52 'user': self.user,
52 53 'repo_name': self.repo_name,
53 54 'user_permissions': {
54 55 'test_hg': 'repository.admin'
55 56 },
56 57 'config': self.config_mock,
57 58 'env': plain_dummy_env()
58 59 }
59 60 parameters.update(kwargs)
60 61 server = MercurialServer(**parameters)
61 62 return server
62 63
63 64
64 65 @pytest.fixture
65 66 def hg_server(app):
66 67 return MercurialServerCreator()
67 68
68 69
69 70 class TestMercurialServer(object):
70 71
71 def test_command(self, hg_server):
72 def test_command(self, hg_server, tmpdir):
72 73 server = hg_server.create()
74 custom_hgrc = os.path.join(str(tmpdir), 'hgrc')
73 75 expected_command = (
74 'cd {root}; {hg_path} -R {root}{repo_name} serve --stdio'.format(
75 root=hg_server.root, hg_path=hg_server.hg_path,
76 'cd {root}; HGRCPATH={custom_hgrc} {hg_path} -R {root}{repo_name} serve --stdio'.format(
77 root=hg_server.root, custom_hgrc=custom_hgrc, hg_path=hg_server.hg_path,
76 78 repo_name=hg_server.repo_name)
77 79 )
78 assert expected_command == server.tunnel.command()
80 server_command = server.tunnel.command(custom_hgrc)
81 assert expected_command == server_command
79 82
80 83 @pytest.mark.parametrize('permissions, action, code', [
81 84 ({}, 'pull', -2),
82 85 ({'test_hg': 'repository.read'}, 'pull', 0),
83 86 ({'test_hg': 'repository.read'}, 'push', -2),
84 87 ({'test_hg': 'repository.write'}, 'push', 0),
85 88 ({'test_hg': 'repository.admin'}, 'push', 0),
86 89
87 90 ])
88 91 def test_permission_checks(self, hg_server, permissions, action, code):
89 92 server = hg_server.create(user_permissions=permissions)
90 93 result = server._check_permissions(action)
91 94 assert result is code
92 95
93 96 @pytest.mark.parametrize('permissions, value', [
94 97 ({}, False),
95 98 ({'test_hg': 'repository.read'}, False),
96 99 ({'test_hg': 'repository.write'}, True),
97 100 ({'test_hg': 'repository.admin'}, True),
98 101
99 102 ])
100 103 def test_has_write_permissions(self, hg_server, permissions, value):
101 104 server = hg_server.create(user_permissions=permissions)
102 105 result = server.has_write_perm()
103 106 assert result is value
104 107
105 108 def test_run_returns_executes_command(self, hg_server):
106 109 server = hg_server.create()
107 110 from rhodecode.apps.ssh_support.lib.backends.hg import MercurialTunnelWrapper
108 111 with mock.patch.object(MercurialTunnelWrapper, 'create_hooks_env') as _patch:
109 112 _patch.return_value = 0
110 113 with mock.patch.object(MercurialTunnelWrapper, 'command', return_value='date'):
111 114 exit_code = server.run()
112 115
113 116 assert exit_code == (0, False)
114 117
115 118
116 119
@@ -1,459 +1,463 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import deform
22 22 import logging
23 23 import peppercorn
24 24 import webhelpers.paginate
25 25
26 26 from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPNotFound
27 27
28 28 from rhodecode.integrations import integration_type_registry
29 29 from rhodecode.apps._base import BaseAppView
30 30 from rhodecode.apps._base.navigation import navigation_list
31 31 from rhodecode.lib.auth import (
32 32 LoginRequired, CSRFRequired, HasPermissionAnyDecorator,
33 33 HasRepoPermissionAnyDecorator, HasRepoGroupPermissionAnyDecorator)
34 34 from rhodecode.lib.utils2 import safe_int
35 35 from rhodecode.lib import helpers as h
36 36 from rhodecode.model.db import Repository, RepoGroup, Session, Integration
37 37 from rhodecode.model.scm import ScmModel
38 38 from rhodecode.model.integration import IntegrationModel
39 39 from rhodecode.model.validation_schema.schemas.integration_schema import (
40 40 make_integration_schema, IntegrationScopeType)
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
45 45 class IntegrationSettingsViewBase(BaseAppView):
46 46 """
47 47 Base Integration settings view used by both repo / global settings
48 48 """
49 49
50 50 def __init__(self, context, request):
51 51 super(IntegrationSettingsViewBase, self).__init__(context, request)
52 52 self._load_view_context()
53 53
54 54 def _load_view_context(self):
55 55 """
56 56 This avoids boilerplate for repo/global+list/edit+views/templates
57 57 by doing all possible contexts at the same time however it should
58 58 be split up into separate functions once more "contexts" exist
59 59 """
60 60
61 61 self.IntegrationType = None
62 62 self.repo = None
63 63 self.repo_group = None
64 64 self.integration = None
65 65 self.integrations = {}
66 66
67 67 request = self.request
68 68
69 69 if 'repo_name' in request.matchdict: # in repo settings context
70 70 repo_name = request.matchdict['repo_name']
71 71 self.repo = Repository.get_by_repo_name(repo_name)
72 72
73 73 if 'repo_group_name' in request.matchdict: # in group settings context
74 74 repo_group_name = request.matchdict['repo_group_name']
75 75 self.repo_group = RepoGroup.get_by_group_name(repo_group_name)
76 76
77 77 if 'integration' in request.matchdict: # integration type context
78 78 integration_type = request.matchdict['integration']
79 79 if integration_type not in integration_type_registry:
80 80 raise HTTPNotFound()
81 81
82 82 self.IntegrationType = integration_type_registry[integration_type]
83 83 if self.IntegrationType.is_dummy:
84 84 raise HTTPNotFound()
85 85
86 86 if 'integration_id' in request.matchdict: # single integration context
87 87 integration_id = request.matchdict['integration_id']
88 88 self.integration = Integration.get(integration_id)
89 89
90 90 # extra perms check just in case
91 91 if not self._has_perms_for_integration(self.integration):
92 92 raise HTTPForbidden()
93 93
94 94 self.settings = self.integration and self.integration.settings or {}
95 95 self.admin_view = not (self.repo or self.repo_group)
96 96
97 97 def _has_perms_for_integration(self, integration):
98 98 perms = self.request.user.permissions
99 99
100 100 if 'hg.admin' in perms['global']:
101 101 return True
102 102
103 103 if integration.repo:
104 104 return perms['repositories'].get(
105 105 integration.repo.repo_name) == 'repository.admin'
106 106
107 107 if integration.repo_group:
108 108 return perms['repositories_groups'].get(
109 109 integration.repo_group.group_name) == 'group.admin'
110 110
111 111 return False
112 112
113 113 def _get_local_tmpl_context(self, include_app_defaults=True):
114 114 _ = self.request.translate
115 115 c = super(IntegrationSettingsViewBase, self)._get_local_tmpl_context(
116 116 include_app_defaults=include_app_defaults)
117
118 117 c.active = 'integrations'
119 118
120 119 return c
121 120
122 121 def _form_schema(self):
123 122 schema = make_integration_schema(IntegrationType=self.IntegrationType,
124 123 settings=self.settings)
125 124
126 125 # returns a clone, important if mutating the schema later
127 126 return schema.bind(
128 127 permissions=self.request.user.permissions,
129 128 no_scope=not self.admin_view)
130 129
131 130 def _form_defaults(self):
132 131 _ = self.request.translate
133 132 defaults = {}
134 133
135 134 if self.integration:
136 135 defaults['settings'] = self.integration.settings or {}
137 136 defaults['options'] = {
138 137 'name': self.integration.name,
139 138 'enabled': self.integration.enabled,
140 139 'scope': {
141 140 'repo': self.integration.repo,
142 141 'repo_group': self.integration.repo_group,
143 142 'child_repos_only': self.integration.child_repos_only,
144 143 },
145 144 }
146 145 else:
147 146 if self.repo:
148 147 scope = _('{repo_name} repository').format(
149 148 repo_name=self.repo.repo_name)
150 149 elif self.repo_group:
151 150 scope = _('{repo_group_name} repo group').format(
152 151 repo_group_name=self.repo_group.group_name)
153 152 else:
154 153 scope = _('Global')
155 154
156 155 defaults['options'] = {
157 156 'enabled': True,
158 157 'name': _('{name} integration').format(
159 158 name=self.IntegrationType.display_name),
160 159 }
161 160 defaults['options']['scope'] = {
162 161 'repo': self.repo,
163 162 'repo_group': self.repo_group,
164 163 }
165 164
166 165 return defaults
167 166
168 167 def _delete_integration(self, integration):
169 168 _ = self.request.translate
170 169 Session().delete(integration)
171 170 Session().commit()
172 171 h.flash(
173 172 _('Integration {integration_name} deleted successfully.').format(
174 173 integration_name=integration.name),
175 174 category='success')
176 175
177 176 if self.repo:
178 177 redirect_to = self.request.route_path(
179 178 'repo_integrations_home', repo_name=self.repo.repo_name)
180 179 elif self.repo_group:
181 180 redirect_to = self.request.route_path(
182 181 'repo_group_integrations_home',
183 182 repo_group_name=self.repo_group.group_name)
184 183 else:
185 184 redirect_to = self.request.route_path('global_integrations_home')
186 185 raise HTTPFound(redirect_to)
187 186
188 187 def _integration_list(self):
189 188 """ List integrations """
190 189
191 190 c = self.load_default_context()
192 191 if self.repo:
193 192 scope = self.repo
194 193 elif self.repo_group:
195 194 scope = self.repo_group
196 195 else:
197 196 scope = 'all'
198 197
199 198 integrations = []
200 199
201 200 for IntType, integration in IntegrationModel().get_integrations(
202 201 scope=scope, IntegrationType=self.IntegrationType):
203 202
204 203 # extra permissions check *just in case*
205 204 if not self._has_perms_for_integration(integration):
206 205 continue
207 206
208 207 integrations.append((IntType, integration))
209 208
210 209 sort_arg = self.request.GET.get('sort', 'name:asc')
211 210 sort_dir = 'asc'
212 211 if ':' in sort_arg:
213 212 sort_field, sort_dir = sort_arg.split(':')
214 213 else:
215 214 sort_field = sort_arg, 'asc'
216 215
217 216 assert sort_field in ('name', 'integration_type', 'enabled', 'scope')
218 217
219 218 integrations.sort(
220 219 key=lambda x: getattr(x[1], sort_field),
221 220 reverse=(sort_dir == 'desc'))
222 221
223 222 page_url = webhelpers.paginate.PageURL(
224 223 self.request.path, self.request.GET)
225 224 page = safe_int(self.request.GET.get('page', 1), 1)
226 225
227 226 integrations = h.Page(
228 227 integrations, page=page, items_per_page=10, url=page_url)
229 228
230 229 c.rev_sort_dir = sort_dir != 'desc' and 'desc' or 'asc'
231 230
232 231 c.current_IntegrationType = self.IntegrationType
233 232 c.integrations_list = integrations
234 233 c.available_integrations = integration_type_registry
235 234
236 235 return self._get_template_context(c)
237 236
238 237 def _settings_get(self, defaults=None, form=None):
239 238 """
240 239 View that displays the integration settings as a form.
241 240 """
242 241 c = self.load_default_context()
243 242
244 243 defaults = defaults or self._form_defaults()
245 244 schema = self._form_schema()
246 245
247 246 if self.integration:
248 247 buttons = ('submit', 'delete')
249 248 else:
250 249 buttons = ('submit',)
251 250
252 251 form = form or deform.Form(schema, appstruct=defaults, buttons=buttons)
253 252
254 253 c.form = form
255 254 c.current_IntegrationType = self.IntegrationType
256 255 c.integration = self.integration
257 256
258 257 return self._get_template_context(c)
259 258
260 259 def _settings_post(self):
261 260 """
262 261 View that validates and stores the integration settings.
263 262 """
264 263 _ = self.request.translate
265 264
266 265 controls = self.request.POST.items()
267 266 pstruct = peppercorn.parse(controls)
268 267
269 268 if self.integration and pstruct.get('delete'):
270 269 return self._delete_integration(self.integration)
271 270
272 271 schema = self._form_schema()
273 272
274 273 skip_settings_validation = False
275 274 if self.integration and 'enabled' not in pstruct.get('options', {}):
276 275 skip_settings_validation = True
277 276 schema['settings'].validator = None
278 277 for field in schema['settings'].children:
279 278 field.validator = None
280 279 field.missing = ''
281 280
282 281 if self.integration:
283 282 buttons = ('submit', 'delete')
284 283 else:
285 284 buttons = ('submit',)
286 285
287 286 form = deform.Form(schema, buttons=buttons)
288 287
289 288 if not self.admin_view:
290 289 # scope is read only field in these cases, and has to be added
291 290 options = pstruct.setdefault('options', {})
292 291 if 'scope' not in options:
293 292 options['scope'] = IntegrationScopeType().serialize(None, {
294 293 'repo': self.repo,
295 294 'repo_group': self.repo_group,
296 295 })
297 296
298 297 try:
299 298 valid_data = form.validate_pstruct(pstruct)
300 299 except deform.ValidationFailure as e:
301 300 h.flash(
302 301 _('Errors exist when saving integration settings. '
303 302 'Please check the form inputs.'),
304 303 category='error')
305 304 return self._settings_get(form=e)
306 305
307 306 if not self.integration:
308 307 self.integration = Integration()
309 308 self.integration.integration_type = self.IntegrationType.key
310 309 Session().add(self.integration)
311 310
312 311 scope = valid_data['options']['scope']
313 312
314 313 IntegrationModel().update_integration(self.integration,
315 314 name=valid_data['options']['name'],
316 315 enabled=valid_data['options']['enabled'],
317 316 settings=valid_data['settings'],
318 317 repo=scope['repo'],
319 318 repo_group=scope['repo_group'],
320 319 child_repos_only=scope['child_repos_only'],
321 320 )
322 321
323 322 self.integration.settings = valid_data['settings']
324 323 Session().commit()
325 324 # Display success message and redirect.
326 325 h.flash(
327 326 _('Integration {integration_name} updated successfully.').format(
328 327 integration_name=self.IntegrationType.display_name),
329 328 category='success')
330 329
331 330 # if integration scope changes, we must redirect to the right place
332 331 # keeping in mind if the original view was for /repo/ or /_admin/
333 332 admin_view = not (self.repo or self.repo_group)
334 333
335 334 if self.integration.repo and not admin_view:
336 335 redirect_to = self.request.route_path(
337 336 'repo_integrations_edit',
338 337 repo_name=self.integration.repo.repo_name,
339 338 integration=self.integration.integration_type,
340 339 integration_id=self.integration.integration_id)
341 340 elif self.integration.repo_group and not admin_view:
342 341 redirect_to = self.request.route_path(
343 342 'repo_group_integrations_edit',
344 343 repo_group_name=self.integration.repo_group.group_name,
345 344 integration=self.integration.integration_type,
346 345 integration_id=self.integration.integration_id)
347 346 else:
348 347 redirect_to = self.request.route_path(
349 348 'global_integrations_edit',
350 349 integration=self.integration.integration_type,
351 350 integration_id=self.integration.integration_id)
352 351
353 352 return HTTPFound(redirect_to)
354 353
355 354 def _new_integration(self):
356 355 c = self.load_default_context()
357 356 c.available_integrations = integration_type_registry
358 357 return self._get_template_context(c)
359 358
360 359 def load_default_context(self):
361 360 raise NotImplementedError()
362 361
363 362
364 363 class GlobalIntegrationsView(IntegrationSettingsViewBase):
365 364 def load_default_context(self):
366 365 c = self._get_local_tmpl_context()
367 366 c.repo = self.repo
368 367 c.repo_group = self.repo_group
369 368 c.navlist = navigation_list(self.request)
370 369
371 370 return c
372 371
373 372 @LoginRequired()
374 373 @HasPermissionAnyDecorator('hg.admin')
375 374 def integration_list(self):
376 375 return self._integration_list()
377 376
378 377 @LoginRequired()
379 378 @HasPermissionAnyDecorator('hg.admin')
380 379 def settings_get(self):
381 380 return self._settings_get()
382 381
383 382 @LoginRequired()
384 383 @HasPermissionAnyDecorator('hg.admin')
385 384 @CSRFRequired()
386 385 def settings_post(self):
387 386 return self._settings_post()
388 387
389 388 @LoginRequired()
390 389 @HasPermissionAnyDecorator('hg.admin')
391 390 def new_integration(self):
392 391 return self._new_integration()
393 392
394 393
395 394 class RepoIntegrationsView(IntegrationSettingsViewBase):
396 395 def load_default_context(self):
397 396 c = self._get_local_tmpl_context()
398 397
399 398 c.repo = self.repo
400 399 c.repo_group = self.repo_group
401 400
402 401 self.db_repo = self.repo
403 402 c.rhodecode_db_repo = self.repo
404 403 c.repo_name = self.db_repo.repo_name
405 404 c.repository_pull_requests = ScmModel().get_pull_requests(self.repo)
406 405
406 c.has_origin_repo_read_perm = False
407 if self.db_repo.fork:
408 c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
409 'repository.write', 'repository.read', 'repository.admin')(
410 self.db_repo.fork.repo_name, 'summary fork link')
407 411 return c
408 412
409 413 @LoginRequired()
410 414 @HasRepoPermissionAnyDecorator('repository.admin')
411 415 def integration_list(self):
412 416 return self._integration_list()
413 417
414 418 @LoginRequired()
415 419 @HasRepoPermissionAnyDecorator('repository.admin')
416 420 def settings_get(self):
417 421 return self._settings_get()
418 422
419 423 @LoginRequired()
420 424 @HasRepoPermissionAnyDecorator('repository.admin')
421 425 @CSRFRequired()
422 426 def settings_post(self):
423 427 return self._settings_post()
424 428
425 429 @LoginRequired()
426 430 @HasRepoPermissionAnyDecorator('repository.admin')
427 431 def new_integration(self):
428 432 return self._new_integration()
429 433
430 434
431 435 class RepoGroupIntegrationsView(IntegrationSettingsViewBase):
432 436 def load_default_context(self):
433 437 c = self._get_local_tmpl_context()
434 438 c.repo = self.repo
435 439 c.repo_group = self.repo_group
436 440 c.navlist = navigation_list(self.request)
437 441
438 442 return c
439 443
440 444 @LoginRequired()
441 445 @HasRepoGroupPermissionAnyDecorator('group.admin')
442 446 def integration_list(self):
443 447 return self._integration_list()
444 448
445 449 @LoginRequired()
446 450 @HasRepoGroupPermissionAnyDecorator('group.admin')
447 451 def settings_get(self):
448 452 return self._settings_get()
449 453
450 454 @LoginRequired()
451 455 @HasRepoGroupPermissionAnyDecorator('group.admin')
452 456 @CSRFRequired()
453 457 def settings_post(self):
454 458 return self._settings_post()
455 459
456 460 @LoginRequired()
457 461 @HasRepoGroupPermissionAnyDecorator('group.admin')
458 462 def new_integration(self):
459 463 return self._new_integration()
@@ -1,624 +1,645 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database creation, and setup module for RhodeCode Enterprise. Used for creation
23 23 of database as well as for migration operations
24 24 """
25 25
26 26 import os
27 27 import sys
28 28 import time
29 29 import uuid
30 30 import logging
31 31 import getpass
32 32 from os.path import dirname as dn, join as jn
33 33
34 34 from sqlalchemy.engine import create_engine
35 35
36 36 from rhodecode import __dbversion__
37 37 from rhodecode.model import init_model
38 38 from rhodecode.model.user import UserModel
39 39 from rhodecode.model.db import (
40 40 User, Permission, RhodeCodeUi, RhodeCodeSetting, UserToPerm,
41 41 DbMigrateVersion, RepoGroup, UserRepoGroupToPerm, CacheKey, Repository)
42 42 from rhodecode.model.meta import Session, Base
43 43 from rhodecode.model.permission import PermissionModel
44 44 from rhodecode.model.repo import RepoModel
45 45 from rhodecode.model.repo_group import RepoGroupModel
46 46 from rhodecode.model.settings import SettingsModel
47 47
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51
52 52 def notify(msg):
53 53 """
54 54 Notification for migrations messages
55 55 """
56 56 ml = len(msg) + (4 * 2)
57 57 print(('\n%s\n*** %s ***\n%s' % ('*' * ml, msg, '*' * ml)).upper())
58 58
59 59
60 60 class DbManage(object):
61 61
62 62 def __init__(self, log_sql, dbconf, root, tests=False,
63 63 SESSION=None, cli_args=None):
64 64 self.dbname = dbconf.split('/')[-1]
65 65 self.tests = tests
66 66 self.root = root
67 67 self.dburi = dbconf
68 68 self.log_sql = log_sql
69 69 self.db_exists = False
70 70 self.cli_args = cli_args or {}
71 71 self.init_db(SESSION=SESSION)
72 72 self.ask_ok = self.get_ask_ok_func(self.cli_args.get('force_ask'))
73 73
74 74 def get_ask_ok_func(self, param):
75 75 if param not in [None]:
76 76 # return a function lambda that has a default set to param
77 77 return lambda *args, **kwargs: param
78 78 else:
79 79 from rhodecode.lib.utils import ask_ok
80 80 return ask_ok
81 81
82 82 def init_db(self, SESSION=None):
83 83 if SESSION:
84 84 self.sa = SESSION
85 85 else:
86 86 # init new sessions
87 87 engine = create_engine(self.dburi, echo=self.log_sql)
88 88 init_model(engine)
89 89 self.sa = Session()
90 90
91 91 def create_tables(self, override=False):
92 92 """
93 93 Create a auth database
94 94 """
95 95
96 96 log.info("Existing database with the same name is going to be destroyed.")
97 97 log.info("Setup command will run DROP ALL command on that database.")
98 98 if self.tests:
99 99 destroy = True
100 100 else:
101 101 destroy = self.ask_ok('Are you sure that you want to destroy the old database? [y/n]')
102 102 if not destroy:
103 103 log.info('Nothing done.')
104 104 sys.exit(0)
105 105 if destroy:
106 106 Base.metadata.drop_all()
107 107
108 108 checkfirst = not override
109 109 Base.metadata.create_all(checkfirst=checkfirst)
110 110 log.info('Created tables for %s', self.dbname)
111 111
112 112 def set_db_version(self):
113 113 ver = DbMigrateVersion()
114 114 ver.version = __dbversion__
115 115 ver.repository_id = 'rhodecode_db_migrations'
116 116 ver.repository_path = 'versions'
117 117 self.sa.add(ver)
118 118 log.info('db version set to: %s', __dbversion__)
119 119
120 120 def run_pre_migration_tasks(self):
121 121 """
122 122 Run various tasks before actually doing migrations
123 123 """
124 124 # delete cache keys on each upgrade
125 125 total = CacheKey.query().count()
126 126 log.info("Deleting (%s) cache keys now...", total)
127 127 CacheKey.delete_all_cache()
128 128
129 129 def upgrade(self, version=None):
130 130 """
131 131 Upgrades given database schema to given revision following
132 132 all needed steps, to perform the upgrade
133 133
134 134 """
135 135
136 136 from rhodecode.lib.dbmigrate.migrate.versioning import api
137 137 from rhodecode.lib.dbmigrate.migrate.exceptions import \
138 138 DatabaseNotControlledError
139 139
140 140 if 'sqlite' in self.dburi:
141 141 print(
142 142 '********************** WARNING **********************\n'
143 143 'Make sure your version of sqlite is at least 3.7.X. \n'
144 144 'Earlier versions are known to fail on some migrations\n'
145 145 '*****************************************************\n')
146 146
147 147 upgrade = self.ask_ok(
148 148 'You are about to perform a database upgrade. Make '
149 149 'sure you have backed up your database. '
150 150 'Continue ? [y/n]')
151 151 if not upgrade:
152 152 log.info('No upgrade performed')
153 153 sys.exit(0)
154 154
155 155 repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
156 156 'rhodecode/lib/dbmigrate')
157 157 db_uri = self.dburi
158 158
159 159 if version:
160 160 DbMigrateVersion.set_version(version)
161 161
162 162 try:
163 163 curr_version = api.db_version(db_uri, repository_path)
164 164 msg = ('Found current database db_uri under version '
165 165 'control with version {}'.format(curr_version))
166 166
167 167 except (RuntimeError, DatabaseNotControlledError):
168 168 curr_version = 1
169 169 msg = ('Current database is not under version control. Setting '
170 170 'as version %s' % curr_version)
171 171 api.version_control(db_uri, repository_path, curr_version)
172 172
173 173 notify(msg)
174 174
175 175 self.run_pre_migration_tasks()
176 176
177 177 if curr_version == __dbversion__:
178 178 log.info('This database is already at the newest version')
179 179 sys.exit(0)
180 180
181 181 upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
182 182 notify('attempting to upgrade database from '
183 183 'version %s to version %s' % (curr_version, __dbversion__))
184 184
185 185 # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
186 186 _step = None
187 187 for step in upgrade_steps:
188 188 notify('performing upgrade step %s' % step)
189 189 time.sleep(0.5)
190 190
191 191 api.upgrade(db_uri, repository_path, step)
192 192 self.sa.rollback()
193 193 notify('schema upgrade for step %s completed' % (step,))
194 194
195 195 _step = step
196 196
197 197 notify('upgrade to version %s successful' % _step)
198 198
199 199 def fix_repo_paths(self):
200 200 """
201 201 Fixes an old RhodeCode version path into new one without a '*'
202 202 """
203 203
204 204 paths = self.sa.query(RhodeCodeUi)\
205 205 .filter(RhodeCodeUi.ui_key == '/')\
206 206 .scalar()
207 207
208 208 paths.ui_value = paths.ui_value.replace('*', '')
209 209
210 210 try:
211 211 self.sa.add(paths)
212 212 self.sa.commit()
213 213 except Exception:
214 214 self.sa.rollback()
215 215 raise
216 216
217 217 def fix_default_user(self):
218 218 """
219 219 Fixes an old default user with some 'nicer' default values,
220 220 used mostly for anonymous access
221 221 """
222 222 def_user = self.sa.query(User)\
223 223 .filter(User.username == User.DEFAULT_USER)\
224 224 .one()
225 225
226 226 def_user.name = 'Anonymous'
227 227 def_user.lastname = 'User'
228 228 def_user.email = User.DEFAULT_USER_EMAIL
229 229
230 230 try:
231 231 self.sa.add(def_user)
232 232 self.sa.commit()
233 233 except Exception:
234 234 self.sa.rollback()
235 235 raise
236 236
237 237 def fix_settings(self):
238 238 """
239 239 Fixes rhodecode settings and adds ga_code key for google analytics
240 240 """
241 241
242 242 hgsettings3 = RhodeCodeSetting('ga_code', '')
243 243
244 244 try:
245 245 self.sa.add(hgsettings3)
246 246 self.sa.commit()
247 247 except Exception:
248 248 self.sa.rollback()
249 249 raise
250 250
251 251 def create_admin_and_prompt(self):
252 252
253 253 # defaults
254 254 defaults = self.cli_args
255 255 username = defaults.get('username')
256 256 password = defaults.get('password')
257 257 email = defaults.get('email')
258 258
259 259 if username is None:
260 260 username = raw_input('Specify admin username:')
261 261 if password is None:
262 262 password = self._get_admin_password()
263 263 if not password:
264 264 # second try
265 265 password = self._get_admin_password()
266 266 if not password:
267 267 sys.exit()
268 268 if email is None:
269 269 email = raw_input('Specify admin email:')
270 270 api_key = self.cli_args.get('api_key')
271 271 self.create_user(username, password, email, True,
272 272 strict_creation_check=False,
273 273 api_key=api_key)
274 274
275 275 def _get_admin_password(self):
276 276 password = getpass.getpass('Specify admin password '
277 277 '(min 6 chars):')
278 278 confirm = getpass.getpass('Confirm password:')
279 279
280 280 if password != confirm:
281 281 log.error('passwords mismatch')
282 282 return False
283 283 if len(password) < 6:
284 284 log.error('password is too short - use at least 6 characters')
285 285 return False
286 286
287 287 return password
288 288
289 289 def create_test_admin_and_users(self):
290 290 log.info('creating admin and regular test users')
291 291 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, \
292 292 TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
293 293 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
294 294 TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
295 295 TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL
296 296
297 297 self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
298 298 TEST_USER_ADMIN_EMAIL, True, api_key=True)
299 299
300 300 self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
301 301 TEST_USER_REGULAR_EMAIL, False, api_key=True)
302 302
303 303 self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS,
304 304 TEST_USER_REGULAR2_EMAIL, False, api_key=True)
305 305
306 306 def create_ui_settings(self, repo_store_path):
307 307 """
308 308 Creates ui settings, fills out hooks
309 309 and disables dotencode
310 310 """
311 311 settings_model = SettingsModel(sa=self.sa)
312 312 from rhodecode.lib.vcs.backends.hg import largefiles_store
313 313 from rhodecode.lib.vcs.backends.git import lfs_store
314 314
315 315 # Build HOOKS
316 316 hooks = [
317 317 (RhodeCodeUi.HOOK_REPO_SIZE, 'python:vcsserver.hooks.repo_size'),
318 318
319 319 # HG
320 320 (RhodeCodeUi.HOOK_PRE_PULL, 'python:vcsserver.hooks.pre_pull'),
321 321 (RhodeCodeUi.HOOK_PULL, 'python:vcsserver.hooks.log_pull_action'),
322 322 (RhodeCodeUi.HOOK_PRE_PUSH, 'python:vcsserver.hooks.pre_push'),
323 323 (RhodeCodeUi.HOOK_PRETX_PUSH, 'python:vcsserver.hooks.pre_push'),
324 324 (RhodeCodeUi.HOOK_PUSH, 'python:vcsserver.hooks.log_push_action'),
325 325 (RhodeCodeUi.HOOK_PUSH_KEY, 'python:vcsserver.hooks.key_push'),
326 326
327 327 ]
328 328
329 329 for key, value in hooks:
330 330 hook_obj = settings_model.get_ui_by_key(key)
331 331 hooks2 = hook_obj if hook_obj else RhodeCodeUi()
332 332 hooks2.ui_section = 'hooks'
333 333 hooks2.ui_key = key
334 334 hooks2.ui_value = value
335 335 self.sa.add(hooks2)
336 336
337 337 # enable largefiles
338 338 largefiles = RhodeCodeUi()
339 339 largefiles.ui_section = 'extensions'
340 340 largefiles.ui_key = 'largefiles'
341 341 largefiles.ui_value = ''
342 342 self.sa.add(largefiles)
343 343
344 344 # set default largefiles cache dir, defaults to
345 345 # /repo_store_location/.cache/largefiles
346 346 largefiles = RhodeCodeUi()
347 347 largefiles.ui_section = 'largefiles'
348 348 largefiles.ui_key = 'usercache'
349 349 largefiles.ui_value = largefiles_store(repo_store_path)
350 350
351 351 self.sa.add(largefiles)
352 352
353 353 # set default lfs cache dir, defaults to
354 354 # /repo_store_location/.cache/lfs_store
355 355 lfsstore = RhodeCodeUi()
356 356 lfsstore.ui_section = 'vcs_git_lfs'
357 357 lfsstore.ui_key = 'store_location'
358 358 lfsstore.ui_value = lfs_store(repo_store_path)
359 359
360 360 self.sa.add(lfsstore)
361 361
362 362 # enable hgsubversion disabled by default
363 363 hgsubversion = RhodeCodeUi()
364 364 hgsubversion.ui_section = 'extensions'
365 365 hgsubversion.ui_key = 'hgsubversion'
366 366 hgsubversion.ui_value = ''
367 367 hgsubversion.ui_active = False
368 368 self.sa.add(hgsubversion)
369 369
370 370 # enable hgevolve disabled by default
371 371 hgevolve = RhodeCodeUi()
372 372 hgevolve.ui_section = 'extensions'
373 373 hgevolve.ui_key = 'evolve'
374 374 hgevolve.ui_value = ''
375 375 hgevolve.ui_active = False
376 376 self.sa.add(hgevolve)
377 377
378 hgevolve = RhodeCodeUi()
379 hgevolve.ui_section = 'experimental'
380 hgevolve.ui_key = 'evolution'
381 hgevolve.ui_value = ''
382 hgevolve.ui_active = False
383 self.sa.add(hgevolve)
384
385 hgevolve = RhodeCodeUi()
386 hgevolve.ui_section = 'experimental'
387 hgevolve.ui_key = 'evolution.exchange'
388 hgevolve.ui_value = ''
389 hgevolve.ui_active = False
390 self.sa.add(hgevolve)
391
392 hgevolve = RhodeCodeUi()
393 hgevolve.ui_section = 'extensions'
394 hgevolve.ui_key = 'topic'
395 hgevolve.ui_value = ''
396 hgevolve.ui_active = False
397 self.sa.add(hgevolve)
398
378 399 # enable hggit disabled by default
379 400 hggit = RhodeCodeUi()
380 401 hggit.ui_section = 'extensions'
381 402 hggit.ui_key = 'hggit'
382 403 hggit.ui_value = ''
383 404 hggit.ui_active = False
384 405 self.sa.add(hggit)
385 406
386 407 # set svn branch defaults
387 408 branches = ["/branches/*", "/trunk"]
388 409 tags = ["/tags/*"]
389 410
390 411 for branch in branches:
391 412 settings_model.create_ui_section_value(
392 413 RhodeCodeUi.SVN_BRANCH_ID, branch)
393 414
394 415 for tag in tags:
395 416 settings_model.create_ui_section_value(RhodeCodeUi.SVN_TAG_ID, tag)
396 417
397 418 def create_auth_plugin_options(self, skip_existing=False):
398 419 """
399 420 Create default auth plugin settings, and make it active
400 421
401 422 :param skip_existing:
402 423 """
403 424
404 425 for k, v, t in [('auth_plugins', 'egg:rhodecode-enterprise-ce#rhodecode', 'list'),
405 426 ('auth_rhodecode_enabled', 'True', 'bool')]:
406 427 if (skip_existing and
407 428 SettingsModel().get_setting_by_name(k) is not None):
408 429 log.debug('Skipping option %s', k)
409 430 continue
410 431 setting = RhodeCodeSetting(k, v, t)
411 432 self.sa.add(setting)
412 433
413 434 def create_default_options(self, skip_existing=False):
414 435 """Creates default settings"""
415 436
416 437 for k, v, t in [
417 438 ('default_repo_enable_locking', False, 'bool'),
418 439 ('default_repo_enable_downloads', False, 'bool'),
419 440 ('default_repo_enable_statistics', False, 'bool'),
420 441 ('default_repo_private', False, 'bool'),
421 442 ('default_repo_type', 'hg', 'unicode')]:
422 443
423 444 if (skip_existing and
424 445 SettingsModel().get_setting_by_name(k) is not None):
425 446 log.debug('Skipping option %s', k)
426 447 continue
427 448 setting = RhodeCodeSetting(k, v, t)
428 449 self.sa.add(setting)
429 450
430 451 def fixup_groups(self):
431 452 def_usr = User.get_default_user()
432 453 for g in RepoGroup.query().all():
433 454 g.group_name = g.get_new_name(g.name)
434 455 self.sa.add(g)
435 456 # get default perm
436 457 default = UserRepoGroupToPerm.query()\
437 458 .filter(UserRepoGroupToPerm.group == g)\
438 459 .filter(UserRepoGroupToPerm.user == def_usr)\
439 460 .scalar()
440 461
441 462 if default is None:
442 463 log.debug('missing default permission for group %s adding', g)
443 464 perm_obj = RepoGroupModel()._create_default_perms(g)
444 465 self.sa.add(perm_obj)
445 466
446 467 def reset_permissions(self, username):
447 468 """
448 469 Resets permissions to default state, useful when old systems had
449 470 bad permissions, we must clean them up
450 471
451 472 :param username:
452 473 """
453 474 default_user = User.get_by_username(username)
454 475 if not default_user:
455 476 return
456 477
457 478 u2p = UserToPerm.query()\
458 479 .filter(UserToPerm.user == default_user).all()
459 480 fixed = False
460 481 if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS):
461 482 for p in u2p:
462 483 Session().delete(p)
463 484 fixed = True
464 485 self.populate_default_permissions()
465 486 return fixed
466 487
467 488 def update_repo_info(self):
468 489 RepoModel.update_repoinfo()
469 490
470 491 def config_prompt(self, test_repo_path='', retries=3):
471 492 defaults = self.cli_args
472 493 _path = defaults.get('repos_location')
473 494 if retries == 3:
474 495 log.info('Setting up repositories config')
475 496
476 497 if _path is not None:
477 498 path = _path
478 499 elif not self.tests and not test_repo_path:
479 500 path = raw_input(
480 501 'Enter a valid absolute path to store repositories. '
481 502 'All repositories in that path will be added automatically:'
482 503 )
483 504 else:
484 505 path = test_repo_path
485 506 path_ok = True
486 507
487 508 # check proper dir
488 509 if not os.path.isdir(path):
489 510 path_ok = False
490 511 log.error('Given path %s is not a valid directory', path)
491 512
492 513 elif not os.path.isabs(path):
493 514 path_ok = False
494 515 log.error('Given path %s is not an absolute path', path)
495 516
496 517 # check if path is at least readable.
497 518 if not os.access(path, os.R_OK):
498 519 path_ok = False
499 520 log.error('Given path %s is not readable', path)
500 521
501 522 # check write access, warn user about non writeable paths
502 523 elif not os.access(path, os.W_OK) and path_ok:
503 524 log.warning('No write permission to given path %s', path)
504 525
505 526 q = ('Given path %s is not writeable, do you want to '
506 527 'continue with read only mode ? [y/n]' % (path,))
507 528 if not self.ask_ok(q):
508 529 log.error('Canceled by user')
509 530 sys.exit(-1)
510 531
511 532 if retries == 0:
512 533 sys.exit('max retries reached')
513 534 if not path_ok:
514 535 retries -= 1
515 536 return self.config_prompt(test_repo_path, retries)
516 537
517 538 real_path = os.path.normpath(os.path.realpath(path))
518 539
519 540 if real_path != os.path.normpath(path):
520 541 q = ('Path looks like a symlink, RhodeCode Enterprise will store '
521 542 'given path as %s ? [y/n]') % (real_path,)
522 543 if not self.ask_ok(q):
523 544 log.error('Canceled by user')
524 545 sys.exit(-1)
525 546
526 547 return real_path
527 548
528 549 def create_settings(self, path):
529 550
530 551 self.create_ui_settings(path)
531 552
532 553 ui_config = [
533 554 ('web', 'push_ssl', 'False'),
534 555 ('web', 'allow_archive', 'gz zip bz2'),
535 556 ('web', 'allow_push', '*'),
536 557 ('web', 'baseurl', '/'),
537 558 ('paths', '/', path),
538 559 ('phases', 'publish', 'True')
539 560 ]
540 561 for section, key, value in ui_config:
541 562 ui_conf = RhodeCodeUi()
542 563 setattr(ui_conf, 'ui_section', section)
543 564 setattr(ui_conf, 'ui_key', key)
544 565 setattr(ui_conf, 'ui_value', value)
545 566 self.sa.add(ui_conf)
546 567
547 568 # rhodecode app settings
548 569 settings = [
549 570 ('realm', 'RhodeCode', 'unicode'),
550 571 ('title', '', 'unicode'),
551 572 ('pre_code', '', 'unicode'),
552 573 ('post_code', '', 'unicode'),
553 574 ('show_public_icon', True, 'bool'),
554 575 ('show_private_icon', True, 'bool'),
555 576 ('stylify_metatags', False, 'bool'),
556 577 ('dashboard_items', 100, 'int'),
557 578 ('admin_grid_items', 25, 'int'),
558 579 ('show_version', True, 'bool'),
559 580 ('use_gravatar', False, 'bool'),
560 581 ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'),
561 582 ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'),
562 583 ('support_url', '', 'unicode'),
563 584 ('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode'),
564 585 ('show_revision_number', True, 'bool'),
565 586 ('show_sha_length', 12, 'int'),
566 587 ]
567 588
568 589 for key, val, type_ in settings:
569 590 sett = RhodeCodeSetting(key, val, type_)
570 591 self.sa.add(sett)
571 592
572 593 self.create_auth_plugin_options()
573 594 self.create_default_options()
574 595
575 596 log.info('created ui config')
576 597
577 598 def create_user(self, username, password, email='', admin=False,
578 599 strict_creation_check=True, api_key=None):
579 600 log.info('creating user `%s`', username)
580 601 user = UserModel().create_or_update(
581 602 username, password, email, firstname=u'RhodeCode', lastname=u'Admin',
582 603 active=True, admin=admin, extern_type="rhodecode",
583 604 strict_creation_check=strict_creation_check)
584 605
585 606 if api_key:
586 607 log.info('setting a new default auth token for user `%s`', username)
587 608 UserModel().add_auth_token(
588 609 user=user, lifetime_minutes=-1,
589 610 role=UserModel.auth_token_role.ROLE_ALL,
590 611 description=u'BUILTIN TOKEN')
591 612
592 613 def create_default_user(self):
593 614 log.info('creating default user')
594 615 # create default user for handling default permissions.
595 616 user = UserModel().create_or_update(username=User.DEFAULT_USER,
596 617 password=str(uuid.uuid1())[:20],
597 618 email=User.DEFAULT_USER_EMAIL,
598 619 firstname=u'Anonymous',
599 620 lastname=u'User',
600 621 strict_creation_check=False)
601 622 # based on configuration options activate/de-activate this user which
602 623 # controlls anonymous access
603 624 if self.cli_args.get('public_access') is False:
604 625 log.info('Public access disabled')
605 626 user.active = False
606 627 Session().add(user)
607 628 Session().commit()
608 629
609 630 def create_permissions(self):
610 631 """
611 632 Creates all permissions defined in the system
612 633 """
613 634 # module.(access|create|change|delete)_[name]
614 635 # module.(none|read|write|admin)
615 636 log.info('creating permissions')
616 637 PermissionModel(self.sa).create_permissions()
617 638
618 639 def populate_default_permissions(self):
619 640 """
620 641 Populate default permissions. It will create only the default
621 642 permissions that are missing, and not alter already defined ones
622 643 """
623 644 log.info('creating default user permissions')
624 645 PermissionModel(self.sa).create_default_user_permissions(user=User.DEFAULT_USER)
@@ -1,169 +1,169 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import webob
22 22 from pyramid.threadlocal import get_current_request
23 23
24 24 from rhodecode import events
25 25 from rhodecode.lib import hooks_base
26 26 from rhodecode.lib import utils2
27 27
28 28
29 29 def _get_rc_scm_extras(username, repo_name, repo_alias, action):
30 30 # TODO: johbo: Replace by vcs_operation_context and remove fully
31 31 from rhodecode.lib.base import vcs_operation_context
32 32 check_locking = action in ('pull', 'push')
33 33
34 34 request = get_current_request()
35 35
36 36 # default
37 37 dummy_environ = webob.Request.blank('').environ
38 38 try:
39 39 environ = request.environ or dummy_environ
40 40 except TypeError:
41 41 # we might use this outside of request context
42 42 environ = dummy_environ
43 43
44 44 extras = vcs_operation_context(
45 45 environ, repo_name, username, action, repo_alias, check_locking)
46 46 return utils2.AttributeDict(extras)
47 47
48 48
49 49 def trigger_post_push_hook(
50 50 username, action, hook_type, repo_name, repo_alias, commit_ids):
51 51 """
52 52 Triggers push action hooks
53 53
54 54 :param username: username who pushes
55 55 :param action: push/push_local/push_remote
56 56 :param repo_name: name of repo
57 57 :param repo_alias: the type of SCM repo
58 58 :param commit_ids: list of commit ids that we pushed
59 59 """
60 60 extras = _get_rc_scm_extras(username, repo_name, repo_alias, action)
61 61 extras.commit_ids = commit_ids
62 62 extras.hook_type = hook_type
63 63 hooks_base.post_push(extras)
64 64
65 65
66 66 def trigger_log_create_pull_request_hook(username, repo_name, repo_alias,
67 67 pull_request, data=None):
68 68 """
69 69 Triggers create pull request action hooks
70 70
71 71 :param username: username who creates the pull request
72 72 :param repo_name: name of target repo
73 73 :param repo_alias: the type of SCM target repo
74 74 :param pull_request: the pull request that was created
75 75 :param data: extra data for specific events e.g {'comment': comment_obj}
76 76 """
77 77 if repo_alias not in ('hg', 'git'):
78 78 return
79 79
80 80 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
81 81 'create_pull_request')
82 82 events.trigger(events.PullRequestCreateEvent(pull_request))
83 extras.update(pull_request.get_api_data())
83 extras.update(pull_request.get_api_data(with_merge_state=False))
84 84 hooks_base.log_create_pull_request(**extras)
85 85
86 86
87 87 def trigger_log_merge_pull_request_hook(username, repo_name, repo_alias,
88 88 pull_request, data=None):
89 89 """
90 90 Triggers merge pull request action hooks
91 91
92 92 :param username: username who creates the pull request
93 93 :param repo_name: name of target repo
94 94 :param repo_alias: the type of SCM target repo
95 95 :param pull_request: the pull request that was merged
96 96 :param data: extra data for specific events e.g {'comment': comment_obj}
97 97 """
98 98 if repo_alias not in ('hg', 'git'):
99 99 return
100 100
101 101 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
102 102 'merge_pull_request')
103 103 events.trigger(events.PullRequestMergeEvent(pull_request))
104 104 extras.update(pull_request.get_api_data())
105 105 hooks_base.log_merge_pull_request(**extras)
106 106
107 107
108 108 def trigger_log_close_pull_request_hook(username, repo_name, repo_alias,
109 109 pull_request, data=None):
110 110 """
111 111 Triggers close pull request action hooks
112 112
113 113 :param username: username who creates the pull request
114 114 :param repo_name: name of target repo
115 115 :param repo_alias: the type of SCM target repo
116 116 :param pull_request: the pull request that was closed
117 117 :param data: extra data for specific events e.g {'comment': comment_obj}
118 118 """
119 119 if repo_alias not in ('hg', 'git'):
120 120 return
121 121
122 122 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
123 123 'close_pull_request')
124 124 events.trigger(events.PullRequestCloseEvent(pull_request))
125 125 extras.update(pull_request.get_api_data())
126 126 hooks_base.log_close_pull_request(**extras)
127 127
128 128
129 129 def trigger_log_review_pull_request_hook(username, repo_name, repo_alias,
130 130 pull_request, data=None):
131 131 """
132 132 Triggers review status change pull request action hooks
133 133
134 134 :param username: username who creates the pull request
135 135 :param repo_name: name of target repo
136 136 :param repo_alias: the type of SCM target repo
137 137 :param pull_request: the pull request that review status changed
138 138 :param data: extra data for specific events e.g {'comment': comment_obj}
139 139 """
140 140 if repo_alias not in ('hg', 'git'):
141 141 return
142 142
143 143 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
144 144 'review_pull_request')
145 145 status = data.get('status')
146 146 events.trigger(events.PullRequestReviewEvent(pull_request, status))
147 147 extras.update(pull_request.get_api_data())
148 148 hooks_base.log_review_pull_request(**extras)
149 149
150 150
151 151 def trigger_log_update_pull_request_hook(username, repo_name, repo_alias,
152 152 pull_request, data=None):
153 153 """
154 154 Triggers update pull request action hooks
155 155
156 156 :param username: username who creates the pull request
157 157 :param repo_name: name of target repo
158 158 :param repo_alias: the type of SCM target repo
159 159 :param pull_request: the pull request that was updated
160 160 :param data: extra data for specific events e.g {'comment': comment_obj}
161 161 """
162 162 if repo_alias not in ('hg', 'git'):
163 163 return
164 164
165 165 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
166 166 'update_pull_request')
167 167 events.trigger(events.PullRequestUpdateEvent(pull_request))
168 168 extras.update(pull_request.get_api_data())
169 169 hooks_base.log_update_pull_request(**extras)
@@ -1,781 +1,782 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Utilities library for RhodeCode
23 23 """
24 24
25 25 import datetime
26 26 import decorator
27 27 import json
28 28 import logging
29 29 import os
30 30 import re
31 31 import sys
32 32 import shutil
33 33 import tempfile
34 34 import traceback
35 35 import tarfile
36 36 import warnings
37 37 import hashlib
38 38 from os.path import join as jn
39 39
40 40 import paste
41 41 import pkg_resources
42 42 from webhelpers.text import collapse, remove_formatting, strip_tags
43 43 from mako import exceptions
44 44 from pyramid.threadlocal import get_current_registry
45 45 from rhodecode.lib.request import Request
46 46
47 47 from rhodecode.lib.vcs.backends.base import Config
48 48 from rhodecode.lib.vcs.exceptions import VCSError
49 49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
50 50 from rhodecode.lib.utils2 import (
51 51 safe_str, safe_unicode, get_current_rhodecode_user, md5, sha1)
52 52 from rhodecode.model import meta
53 53 from rhodecode.model.db import (
54 54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
55 55 from rhodecode.model.meta import Session
56 56
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61 61
62 62 # String which contains characters that are not allowed in slug names for
63 63 # repositories or repository groups. It is properly escaped to use it in
64 64 # regular expressions.
65 65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
66 66
67 67 # Regex that matches forbidden characters in repo/group slugs.
68 68 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
69 69
70 70 # Regex that matches allowed characters in repo/group slugs.
71 71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
72 72
73 73 # Regex that matches whole repo/group slugs.
74 74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
75 75
76 76 _license_cache = None
77 77
78 78
79 79 def repo_name_slug(value):
80 80 """
81 81 Return slug of name of repository
82 82 This function is called on each creation/modification
83 83 of repository to prevent bad names in repo
84 84 """
85 85 replacement_char = '-'
86 86
87 87 slug = remove_formatting(value)
88 88 slug = SLUG_BAD_CHAR_RE.sub('', slug)
89 89 slug = re.sub('[\s]+', '-', slug)
90 90 slug = collapse(slug, replacement_char)
91 91 return slug
92 92
93 93
94 94 #==============================================================================
95 95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
96 96 #==============================================================================
97 97 def get_repo_slug(request):
98 98 _repo = ''
99 99
100 100 if hasattr(request, 'db_repo'):
101 101 # if our requests has set db reference use it for name, this
102 102 # translates the example.com/_<id> into proper repo names
103 103 _repo = request.db_repo.repo_name
104 104 elif getattr(request, 'matchdict', None):
105 105 # pyramid
106 106 _repo = request.matchdict.get('repo_name')
107 107
108 108 if _repo:
109 109 _repo = _repo.rstrip('/')
110 110 return _repo
111 111
112 112
113 113 def get_repo_group_slug(request):
114 114 _group = ''
115 115 if hasattr(request, 'db_repo_group'):
116 116 # if our requests has set db reference use it for name, this
117 117 # translates the example.com/_<id> into proper repo group names
118 118 _group = request.db_repo_group.group_name
119 119 elif getattr(request, 'matchdict', None):
120 120 # pyramid
121 121 _group = request.matchdict.get('repo_group_name')
122 122
123 123 if _group:
124 124 _group = _group.rstrip('/')
125 125 return _group
126 126
127 127
128 128 def get_user_group_slug(request):
129 129 _user_group = ''
130 130
131 131 if hasattr(request, 'db_user_group'):
132 132 _user_group = request.db_user_group.users_group_name
133 133 elif getattr(request, 'matchdict', None):
134 134 # pyramid
135 135 _user_group = request.matchdict.get('user_group_id')
136 136 _user_group_name = request.matchdict.get('user_group_name')
137 137 try:
138 138 if _user_group:
139 139 _user_group = UserGroup.get(_user_group)
140 140 elif _user_group_name:
141 141 _user_group = UserGroup.get_by_group_name(_user_group_name)
142 142
143 143 if _user_group:
144 144 _user_group = _user_group.users_group_name
145 145 except Exception:
146 146 log.exception('Failed to get user group by id and name')
147 147 # catch all failures here
148 148 return None
149 149
150 150 return _user_group
151 151
152 152
153 153 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
154 154 """
155 155 Scans given path for repos and return (name,(type,path)) tuple
156 156
157 157 :param path: path to scan for repositories
158 158 :param recursive: recursive search and return names with subdirs in front
159 159 """
160 160
161 161 # remove ending slash for better results
162 162 path = path.rstrip(os.sep)
163 163 log.debug('now scanning in %s location recursive:%s...', path, recursive)
164 164
165 165 def _get_repos(p):
166 166 dirpaths = _get_dirpaths(p)
167 167 if not _is_dir_writable(p):
168 168 log.warning('repo path without write access: %s', p)
169 169
170 170 for dirpath in dirpaths:
171 171 if os.path.isfile(os.path.join(p, dirpath)):
172 172 continue
173 173 cur_path = os.path.join(p, dirpath)
174 174
175 175 # skip removed repos
176 176 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
177 177 continue
178 178
179 179 #skip .<somethin> dirs
180 180 if dirpath.startswith('.'):
181 181 continue
182 182
183 183 try:
184 184 scm_info = get_scm(cur_path)
185 185 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
186 186 except VCSError:
187 187 if not recursive:
188 188 continue
189 189 #check if this dir containts other repos for recursive scan
190 190 rec_path = os.path.join(p, dirpath)
191 191 if os.path.isdir(rec_path):
192 192 for inner_scm in _get_repos(rec_path):
193 193 yield inner_scm
194 194
195 195 return _get_repos(path)
196 196
197 197
198 198 def _get_dirpaths(p):
199 199 try:
200 200 # OS-independable way of checking if we have at least read-only
201 201 # access or not.
202 202 dirpaths = os.listdir(p)
203 203 except OSError:
204 204 log.warning('ignoring repo path without read access: %s', p)
205 205 return []
206 206
207 207 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
208 208 # decode paths and suddenly returns unicode objects itself. The items it
209 209 # cannot decode are returned as strings and cause issues.
210 210 #
211 211 # Those paths are ignored here until a solid solution for path handling has
212 212 # been built.
213 213 expected_type = type(p)
214 214
215 215 def _has_correct_type(item):
216 216 if type(item) is not expected_type:
217 217 log.error(
218 218 u"Ignoring path %s since it cannot be decoded into unicode.",
219 219 # Using "repr" to make sure that we see the byte value in case
220 220 # of support.
221 221 repr(item))
222 222 return False
223 223 return True
224 224
225 225 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
226 226
227 227 return dirpaths
228 228
229 229
230 230 def _is_dir_writable(path):
231 231 """
232 232 Probe if `path` is writable.
233 233
234 234 Due to trouble on Cygwin / Windows, this is actually probing if it is
235 235 possible to create a file inside of `path`, stat does not produce reliable
236 236 results in this case.
237 237 """
238 238 try:
239 239 with tempfile.TemporaryFile(dir=path):
240 240 pass
241 241 except OSError:
242 242 return False
243 243 return True
244 244
245 245
246 246 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
247 247 """
248 248 Returns True if given path is a valid repository False otherwise.
249 249 If expect_scm param is given also, compare if given scm is the same
250 250 as expected from scm parameter. If explicit_scm is given don't try to
251 251 detect the scm, just use the given one to check if repo is valid
252 252
253 253 :param repo_name:
254 254 :param base_path:
255 255 :param expect_scm:
256 256 :param explicit_scm:
257 257 :param config:
258 258
259 259 :return True: if given path is a valid repository
260 260 """
261 261 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
262 262 log.debug('Checking if `%s` is a valid path for repository. '
263 263 'Explicit type: %s', repo_name, explicit_scm)
264 264
265 265 try:
266 266 if explicit_scm:
267 267 detected_scms = [get_scm_backend(explicit_scm)(
268 268 full_path, config=config).alias]
269 269 else:
270 270 detected_scms = get_scm(full_path)
271 271
272 272 if expect_scm:
273 273 return detected_scms[0] == expect_scm
274 274 log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
275 275 return True
276 276 except VCSError:
277 277 log.debug('path: %s is not a valid repo !', full_path)
278 278 return False
279 279
280 280
281 281 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
282 282 """
283 283 Returns True if given path is a repository group, False otherwise
284 284
285 285 :param repo_name:
286 286 :param base_path:
287 287 """
288 288 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
289 289 log.debug('Checking if `%s` is a valid path for repository group',
290 290 repo_group_name)
291 291
292 292 # check if it's not a repo
293 293 if is_valid_repo(repo_group_name, base_path):
294 294 log.debug('Repo called %s exist, it is not a valid repo group', repo_group_name)
295 295 return False
296 296
297 297 try:
298 298 # we need to check bare git repos at higher level
299 299 # since we might match branches/hooks/info/objects or possible
300 300 # other things inside bare git repo
301 301 maybe_repo = os.path.dirname(full_path)
302 302 if maybe_repo == base_path:
303 303 # skip root level repo check, we know root location CANNOT BE a repo group
304 304 return False
305 305
306 306 scm_ = get_scm(maybe_repo)
307 307 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
308 308 return False
309 309 except VCSError:
310 310 pass
311 311
312 312 # check if it's a valid path
313 313 if skip_path_check or os.path.isdir(full_path):
314 314 log.debug('path: %s is a valid repo group !', full_path)
315 315 return True
316 316
317 317 log.debug('path: %s is not a valid repo group !', full_path)
318 318 return False
319 319
320 320
321 321 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
322 322 while True:
323 323 ok = raw_input(prompt)
324 324 if ok.lower() in ('y', 'ye', 'yes'):
325 325 return True
326 326 if ok.lower() in ('n', 'no', 'nop', 'nope'):
327 327 return False
328 328 retries = retries - 1
329 329 if retries < 0:
330 330 raise IOError
331 331 print(complaint)
332 332
333 333 # propagated from mercurial documentation
334 334 ui_sections = [
335 335 'alias', 'auth',
336 336 'decode/encode', 'defaults',
337 337 'diff', 'email',
338 338 'extensions', 'format',
339 339 'merge-patterns', 'merge-tools',
340 340 'hooks', 'http_proxy',
341 341 'smtp', 'patch',
342 342 'paths', 'profiling',
343 343 'server', 'trusted',
344 344 'ui', 'web', ]
345 345
346 346
347 347 def config_data_from_db(clear_session=True, repo=None):
348 348 """
349 349 Read the configuration data from the database and return configuration
350 350 tuples.
351 351 """
352 352 from rhodecode.model.settings import VcsSettingsModel
353 353
354 354 config = []
355 355
356 356 sa = meta.Session()
357 357 settings_model = VcsSettingsModel(repo=repo, sa=sa)
358 358
359 359 ui_settings = settings_model.get_ui_settings()
360 360
361 361 ui_data = []
362 362 for setting in ui_settings:
363 363 if setting.active:
364 364 ui_data.append((setting.section, setting.key, setting.value))
365 365 config.append((
366 366 safe_str(setting.section), safe_str(setting.key),
367 367 safe_str(setting.value)))
368 368 if setting.key == 'push_ssl':
369 369 # force set push_ssl requirement to False, rhodecode
370 370 # handles that
371 371 config.append((
372 372 safe_str(setting.section), safe_str(setting.key), False))
373 373 log.debug(
374 'settings ui from db: %s',
374 'settings ui from db@repo[%s]: %s',
375 repo,
375 376 ','.join(map(lambda s: '[{}] {}={}'.format(*s), ui_data)))
376 377 if clear_session:
377 378 meta.Session.remove()
378 379
379 380 # TODO: mikhail: probably it makes no sense to re-read hooks information.
380 381 # It's already there and activated/deactivated
381 382 skip_entries = []
382 383 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
383 384 if 'pull' not in enabled_hook_classes:
384 385 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
385 386 if 'push' not in enabled_hook_classes:
386 387 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
387 388 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
388 389 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
389 390
390 391 config = [entry for entry in config if entry[:2] not in skip_entries]
391 392
392 393 return config
393 394
394 395
395 396 def make_db_config(clear_session=True, repo=None):
396 397 """
397 398 Create a :class:`Config` instance based on the values in the database.
398 399 """
399 400 config = Config()
400 401 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
401 402 for section, option, value in config_data:
402 403 config.set(section, option, value)
403 404 return config
404 405
405 406
406 407 def get_enabled_hook_classes(ui_settings):
407 408 """
408 409 Return the enabled hook classes.
409 410
410 411 :param ui_settings: List of ui_settings as returned
411 412 by :meth:`VcsSettingsModel.get_ui_settings`
412 413
413 414 :return: a list with the enabled hook classes. The order is not guaranteed.
414 415 :rtype: list
415 416 """
416 417 enabled_hooks = []
417 418 active_hook_keys = [
418 419 key for section, key, value, active in ui_settings
419 420 if section == 'hooks' and active]
420 421
421 422 hook_names = {
422 423 RhodeCodeUi.HOOK_PUSH: 'push',
423 424 RhodeCodeUi.HOOK_PULL: 'pull',
424 425 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
425 426 }
426 427
427 428 for key in active_hook_keys:
428 429 hook = hook_names.get(key)
429 430 if hook:
430 431 enabled_hooks.append(hook)
431 432
432 433 return enabled_hooks
433 434
434 435
435 436 def set_rhodecode_config(config):
436 437 """
437 438 Updates pyramid config with new settings from database
438 439
439 440 :param config:
440 441 """
441 442 from rhodecode.model.settings import SettingsModel
442 443 app_settings = SettingsModel().get_all_settings()
443 444
444 445 for k, v in app_settings.items():
445 446 config[k] = v
446 447
447 448
448 449 def get_rhodecode_realm():
449 450 """
450 451 Return the rhodecode realm from database.
451 452 """
452 453 from rhodecode.model.settings import SettingsModel
453 454 realm = SettingsModel().get_setting_by_name('realm')
454 455 return safe_str(realm.app_settings_value)
455 456
456 457
457 458 def get_rhodecode_base_path():
458 459 """
459 460 Returns the base path. The base path is the filesystem path which points
460 461 to the repository store.
461 462 """
462 463 from rhodecode.model.settings import SettingsModel
463 464 paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
464 465 return safe_str(paths_ui.ui_value)
465 466
466 467
467 468 def map_groups(path):
468 469 """
469 470 Given a full path to a repository, create all nested groups that this
470 471 repo is inside. This function creates parent-child relationships between
471 472 groups and creates default perms for all new groups.
472 473
473 474 :param paths: full path to repository
474 475 """
475 476 from rhodecode.model.repo_group import RepoGroupModel
476 477 sa = meta.Session()
477 478 groups = path.split(Repository.NAME_SEP)
478 479 parent = None
479 480 group = None
480 481
481 482 # last element is repo in nested groups structure
482 483 groups = groups[:-1]
483 484 rgm = RepoGroupModel(sa)
484 485 owner = User.get_first_super_admin()
485 486 for lvl, group_name in enumerate(groups):
486 487 group_name = '/'.join(groups[:lvl] + [group_name])
487 488 group = RepoGroup.get_by_group_name(group_name)
488 489 desc = '%s group' % group_name
489 490
490 491 # skip folders that are now removed repos
491 492 if REMOVED_REPO_PAT.match(group_name):
492 493 break
493 494
494 495 if group is None:
495 496 log.debug('creating group level: %s group_name: %s',
496 497 lvl, group_name)
497 498 group = RepoGroup(group_name, parent)
498 499 group.group_description = desc
499 500 group.user = owner
500 501 sa.add(group)
501 502 perm_obj = rgm._create_default_perms(group)
502 503 sa.add(perm_obj)
503 504 sa.flush()
504 505
505 506 parent = group
506 507 return group
507 508
508 509
509 510 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
510 511 """
511 512 maps all repos given in initial_repo_list, non existing repositories
512 513 are created, if remove_obsolete is True it also checks for db entries
513 514 that are not in initial_repo_list and removes them.
514 515
515 516 :param initial_repo_list: list of repositories found by scanning methods
516 517 :param remove_obsolete: check for obsolete entries in database
517 518 """
518 519 from rhodecode.model.repo import RepoModel
519 520 from rhodecode.model.repo_group import RepoGroupModel
520 521 from rhodecode.model.settings import SettingsModel
521 522
522 523 sa = meta.Session()
523 524 repo_model = RepoModel()
524 525 user = User.get_first_super_admin()
525 526 added = []
526 527
527 528 # creation defaults
528 529 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
529 530 enable_statistics = defs.get('repo_enable_statistics')
530 531 enable_locking = defs.get('repo_enable_locking')
531 532 enable_downloads = defs.get('repo_enable_downloads')
532 533 private = defs.get('repo_private')
533 534
534 535 for name, repo in initial_repo_list.items():
535 536 group = map_groups(name)
536 537 unicode_name = safe_unicode(name)
537 538 db_repo = repo_model.get_by_repo_name(unicode_name)
538 539 # found repo that is on filesystem not in RhodeCode database
539 540 if not db_repo:
540 541 log.info('repository %s not found, creating now', name)
541 542 added.append(name)
542 543 desc = (repo.description
543 544 if repo.description != 'unknown'
544 545 else '%s repository' % name)
545 546
546 547 db_repo = repo_model._create_repo(
547 548 repo_name=name,
548 549 repo_type=repo.alias,
549 550 description=desc,
550 551 repo_group=getattr(group, 'group_id', None),
551 552 owner=user,
552 553 enable_locking=enable_locking,
553 554 enable_downloads=enable_downloads,
554 555 enable_statistics=enable_statistics,
555 556 private=private,
556 557 state=Repository.STATE_CREATED
557 558 )
558 559 sa.commit()
559 560 # we added that repo just now, and make sure we updated server info
560 561 if db_repo.repo_type == 'git':
561 562 git_repo = db_repo.scm_instance()
562 563 # update repository server-info
563 564 log.debug('Running update server info')
564 565 git_repo._update_server_info()
565 566
566 567 db_repo.update_commit_cache()
567 568
568 569 config = db_repo._config
569 570 config.set('extensions', 'largefiles', '')
570 571 repo = db_repo.scm_instance(config=config)
571 572 repo.install_hooks()
572 573
573 574 removed = []
574 575 if remove_obsolete:
575 576 # remove from database those repositories that are not in the filesystem
576 577 for repo in sa.query(Repository).all():
577 578 if repo.repo_name not in initial_repo_list.keys():
578 579 log.debug("Removing non-existing repository found in db `%s`",
579 580 repo.repo_name)
580 581 try:
581 582 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
582 583 sa.commit()
583 584 removed.append(repo.repo_name)
584 585 except Exception:
585 586 # don't hold further removals on error
586 587 log.error(traceback.format_exc())
587 588 sa.rollback()
588 589
589 590 def splitter(full_repo_name):
590 591 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
591 592 gr_name = None
592 593 if len(_parts) == 2:
593 594 gr_name = _parts[0]
594 595 return gr_name
595 596
596 597 initial_repo_group_list = [splitter(x) for x in
597 598 initial_repo_list.keys() if splitter(x)]
598 599
599 600 # remove from database those repository groups that are not in the
600 601 # filesystem due to parent child relationships we need to delete them
601 602 # in a specific order of most nested first
602 603 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
603 604 nested_sort = lambda gr: len(gr.split('/'))
604 605 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
605 606 if group_name not in initial_repo_group_list:
606 607 repo_group = RepoGroup.get_by_group_name(group_name)
607 608 if (repo_group.children.all() or
608 609 not RepoGroupModel().check_exist_filesystem(
609 610 group_name=group_name, exc_on_failure=False)):
610 611 continue
611 612
612 613 log.info(
613 614 'Removing non-existing repository group found in db `%s`',
614 615 group_name)
615 616 try:
616 617 RepoGroupModel(sa).delete(group_name, fs_remove=False)
617 618 sa.commit()
618 619 removed.append(group_name)
619 620 except Exception:
620 621 # don't hold further removals on error
621 622 log.exception(
622 623 'Unable to remove repository group `%s`',
623 624 group_name)
624 625 sa.rollback()
625 626 raise
626 627
627 628 return added, removed
628 629
629 630
630 631 def load_rcextensions(root_path):
631 632 import rhodecode
632 633 from rhodecode.config import conf
633 634
634 635 path = os.path.join(root_path)
635 636 sys.path.append(path)
636 637 try:
637 638 rcextensions = __import__('rcextensions')
638 639 except ImportError:
639 640 log.warn('Unable to load rcextensions from %s', path)
640 641 rcextensions = None
641 642
642 643 if rcextensions:
643 644 log.debug('Found rcextensions module loaded %s...', rcextensions)
644 645 rhodecode.EXTENSIONS = rcextensions
645 646
646 647 # Additional mappings that are not present in the pygments lexers
647 648 conf.LANGUAGES_EXTENSIONS_MAP.update(
648 649 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
649 650
650 651
651 652 def get_custom_lexer(extension):
652 653 """
653 654 returns a custom lexer if it is defined in rcextensions module, or None
654 655 if there's no custom lexer defined
655 656 """
656 657 import rhodecode
657 658 from pygments import lexers
658 659
659 660 # custom override made by RhodeCode
660 661 if extension in ['mako']:
661 662 return lexers.get_lexer_by_name('html+mako')
662 663
663 664 # check if we didn't define this extension as other lexer
664 665 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
665 666 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
666 667 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
667 668 return lexers.get_lexer_by_name(_lexer_name)
668 669
669 670
670 671 #==============================================================================
671 672 # TEST FUNCTIONS AND CREATORS
672 673 #==============================================================================
673 674 def create_test_index(repo_location, config):
674 675 """
675 676 Makes default test index.
676 677 """
677 678 import rc_testdata
678 679
679 680 rc_testdata.extract_search_index(
680 681 'vcs_search_index', os.path.dirname(config['search.location']))
681 682
682 683
683 684 def create_test_directory(test_path):
684 685 """
685 686 Create test directory if it doesn't exist.
686 687 """
687 688 if not os.path.isdir(test_path):
688 689 log.debug('Creating testdir %s', test_path)
689 690 os.makedirs(test_path)
690 691
691 692
692 693 def create_test_database(test_path, config):
693 694 """
694 695 Makes a fresh database.
695 696 """
696 697 from rhodecode.lib.db_manage import DbManage
697 698
698 699 # PART ONE create db
699 700 dbconf = config['sqlalchemy.db1.url']
700 701 log.debug('making test db %s', dbconf)
701 702
702 703 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
703 704 tests=True, cli_args={'force_ask': True})
704 705 dbmanage.create_tables(override=True)
705 706 dbmanage.set_db_version()
706 707 # for tests dynamically set new root paths based on generated content
707 708 dbmanage.create_settings(dbmanage.config_prompt(test_path))
708 709 dbmanage.create_default_user()
709 710 dbmanage.create_test_admin_and_users()
710 711 dbmanage.create_permissions()
711 712 dbmanage.populate_default_permissions()
712 713 Session().commit()
713 714
714 715
715 716 def create_test_repositories(test_path, config):
716 717 """
717 718 Creates test repositories in the temporary directory. Repositories are
718 719 extracted from archives within the rc_testdata package.
719 720 """
720 721 import rc_testdata
721 722 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
722 723
723 724 log.debug('making test vcs repositories')
724 725
725 726 idx_path = config['search.location']
726 727 data_path = config['cache_dir']
727 728
728 729 # clean index and data
729 730 if idx_path and os.path.exists(idx_path):
730 731 log.debug('remove %s', idx_path)
731 732 shutil.rmtree(idx_path)
732 733
733 734 if data_path and os.path.exists(data_path):
734 735 log.debug('remove %s', data_path)
735 736 shutil.rmtree(data_path)
736 737
737 738 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
738 739 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
739 740
740 741 # Note: Subversion is in the process of being integrated with the system,
741 742 # until we have a properly packed version of the test svn repository, this
742 743 # tries to copy over the repo from a package "rc_testdata"
743 744 svn_repo_path = rc_testdata.get_svn_repo_archive()
744 745 with tarfile.open(svn_repo_path) as tar:
745 746 tar.extractall(jn(test_path, SVN_REPO))
746 747
747 748
748 749 def password_changed(auth_user, session):
749 750 # Never report password change in case of default user or anonymous user.
750 751 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
751 752 return False
752 753
753 754 password_hash = md5(auth_user.password) if auth_user.password else None
754 755 rhodecode_user = session.get('rhodecode_user', {})
755 756 session_password_hash = rhodecode_user.get('password', '')
756 757 return password_hash != session_password_hash
757 758
758 759
759 760 def read_opensource_licenses():
760 761 global _license_cache
761 762
762 763 if not _license_cache:
763 764 licenses = pkg_resources.resource_string(
764 765 'rhodecode', 'config/licenses.json')
765 766 _license_cache = json.loads(licenses)
766 767
767 768 return _license_cache
768 769
769 770
770 771 def generate_platform_uuid():
771 772 """
772 773 Generates platform UUID based on it's name
773 774 """
774 775 import platform
775 776
776 777 try:
777 778 uuid_list = [platform.platform()]
778 779 return hashlib.sha256(':'.join(uuid_list)).hexdigest()
779 780 except Exception as e:
780 781 log.error('Failed to generate host uuid: %s', e)
781 782 return 'UNDEFINED'
@@ -1,1846 +1,1849 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24 import os
25 25 import re
26 26 import time
27 27 import shutil
28 28 import datetime
29 29 import fnmatch
30 30 import itertools
31 31 import logging
32 32 import collections
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36 from pyramid import compat
37 37
38 38 from rhodecode.translation import lazy_ugettext
39 39 from rhodecode.lib.utils2 import safe_str, safe_unicode
40 40 from rhodecode.lib.vcs import connection
41 41 from rhodecode.lib.vcs.utils import author_name, author_email
42 42 from rhodecode.lib.vcs.conf import settings
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
45 45 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
46 46 NodeDoesNotExistError, NodeNotChangedError, VCSError,
47 47 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
48 48 RepositoryError)
49 49
50 50
log = logging.getLogger(__name__)


# Git-style file mode constants (octal): regular file vs. executable file.
FILEMODE_DEFAULT = 0o100644
FILEMODE_EXECUTABLE = 0o100755

# Lightweight descriptor of a VCS reference.
# NOTE(review): `type` is presumably one of 'branch'/'tag'/'book' — confirm
# against the backend implementations; `commit_id` is the commit it points to.
Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
58 58
59 59
class MergeFailureReason(object):
    """
    Enumeration of every reason a server-side merge may fail.

    The numeric values may be persisted in the database, so they must
    NEVER be renumbered. Renaming a member is the accepted way to
    deprecate an old reason.
    """

    # Merge went through without problems.
    NONE = 0

    # An unexpected exception was raised; details are in the logs.
    UNKNOWN = 1

    # The merge itself failed: there are conflicts.
    MERGE_FAILED = 2

    # Merge succeeded locally but pushing it to the target repo failed.
    PUSH_FAILED = 3

    # The requested target is not a head of the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repo has branches the target lacks; pushing the merge
    # would create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference resolves to multiple heads, so the target
    # location is ambiguous (mercurial branches only).
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked.
    TARGET_IS_LOCKED = 7

    # Deprecated — use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # An involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # Merge failed because of conflicts related to sub repositories.
    SUBREPO_MERGE_FAILED = 11
112 112
class UpdateFailureReason(object):
    """
    Enumeration of every reason a pull request update may fail.

    The numeric values may be persisted in the database, so they must
    NEVER be renumbered. Renaming a member is the accepted way to
    deprecate an old reason.
    """

    # Update went through without problems.
    NONE = 0

    # An unexpected exception was raised; details are in the logs.
    UNKNOWN = 1

    # The pull request is already up to date.
    NO_CHANGE = 2

    # The pull request's reference type is not supported for updates.
    WRONG_REF_TYPE = 3

    # The target reference is missing.
    MISSING_TARGET_REF = 4

    # The source reference is missing.
    MISSING_SOURCE_REF = 5
142 142
class MergeResponse(object):
    """
    Value object describing the outcome of a (possibly simulated) merge.

    ``failure_reason`` is one of the ``MergeFailureReason`` constants and
    selects a human-readable, translatable message from
    ``MERGE_STATUS_MESSAGES`` which is interpolated with ``metadata``.
    """

    # Templates use .format(**metadata) for variable interpolation, so the
    # exact placeholder names must match the keys callers put in `metadata`.
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            u'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            u'This pull request cannot be merged because of an unhandled exception. '
            u'{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of merge conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            u'This pull request could not be merged because push to '
            u'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            u'This pull request cannot be merged because the source contains '
            u'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            u'This pull request cannot be merged because the target `{target_ref.name}` '
            u'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            u'This pull request cannot be merged because the target repository is '
            u'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            u'This pull request cannot be merged because the source '
            u'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of conflicts related '
            u'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            u'This pull request cannot be merged because the target or the '
            u'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
        # possible: bool, merge could be performed
        # executed: bool, merge was actually performed (not just simulated)
        # merge_ref: reference of the resulting merge commit (or None)
        # failure_reason: a MergeFailureReason constant
        # metadata: dict used to interpolate the status message template
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)

    def __eq__(self, other):
        # NOTE: merge_ref and metadata are deliberately not compared;
        # no __hash__ is defined, so instances are compared, not hashed.
        same_instance = isinstance(other, self.__class__)
        return same_instance \
            and self.possible == other.possible \
            and self.executed == other.executed \
            and self.failure_reason == other.failure_reason

    @property
    def label(self):
        """Symbolic name of ``failure_reason``, e.g. ``'MERGE_FAILED'``."""
        label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
                          not k.startswith('_'))
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
        try:
            return msg.format(**self.metadata)
        except Exception:
            # a missing/mismatched placeholder must never break callers;
            # fall back to the raw template
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        """Serialize the response (including the formatted message) to a dict."""
        data = {}
        for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
                  'merge_status_message']:
            data[k] = getattr(self, k)
        return data
228 228
229 229
class BaseRepository(object):
    """
    Base Repository for final backends

    .. attribute:: DEFAULT_BRANCH_NAME

        name of default branch (i.e. "trunk" for svn, "master" for git etc.)

    .. attribute:: commit_ids

        list of all available commit ids, in ascending order

    .. attribute:: path

        absolute path to the repository

    .. attribute:: bookmarks

        Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
        there are no bookmarks or the backend implementation does not support
        bookmarks.

    .. attribute:: tags

        Mapping from name to :term:`Commit ID` of the tag.

    """

    DEFAULT_BRANCH_NAME = None
    DEFAULT_CONTACT = u"Unknown"
    DEFAULT_DESCRIPTION = u"unknown"
    EMPTY_COMMIT_ID = '0' * 40

    path = None

    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be find at the given ``repo_path`` or directory at ``repo_path``
        exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
          would be cloned; requires ``create`` parameter to be set to True -
          raises RepositoryError if src_url is set and create evaluates to
          False
        """
        raise NotImplementedError

    def __repr__(self):
        return '<%s at %s>' % (self.__class__.__name__, self.path)

    def __len__(self):
        # number of commits; delegates to count()
        return self.count()

    def __eq__(self, other):
        # repositories are equal when they are the same backend type and
        # live at the same filesystem path
        same_instance = isinstance(other, self.__class__)
        return same_instance and other.path == self.path

    def __ne__(self, other):
        return not self.__eq__(other)

    def get_create_shadow_cache_pr_path(self, db_repo):
        """Return (creating if needed) the cached-diffs directory of ``db_repo``."""
        path = db_repo.cached_diffs_dir
        if not os.path.exists(path):
            os.makedirs(path, 0o755)
        return path

    @classmethod
    def get_default_config(cls, default=None):
        """
        Build a fresh ``Config``, optionally pre-populated from ``default``,
        a list of ``(section, key, value)`` tuples.
        """
        config = Config()
        if default and isinstance(default, list):
            for section, key, val in default:
                config.set(section, key, val)
        return config

    @LazyProperty
    def _remote(self):
        # backend-specific remote (vcsserver) proxy; must be provided by
        # subclasses
        raise NotImplementedError

    def _heads(self, branch=None):
        # Default implementation: no head information available.
        # NOTE(review): backends presumably override this to return head
        # commit ids for `branch` — confirm against hg/git backends.
        return []

    @LazyProperty
    def EMPTY_COMMIT(self):
        """An ``EmptyCommit`` bound to this backend's null commit id."""
        return EmptyCommit(self.EMPTY_COMMIT_ID)

    @LazyProperty
    def alias(self):
        """Backend alias key (e.g. 'hg', 'git'), reverse-mapped from settings.BACKENDS."""
        for k, v in settings.BACKENDS.items():
            if v.split('.')[-1] == str(self.__class__.__name__):
                return k

    @LazyProperty
    def name(self):
        """Repository name, i.e. the basename of :attr:`path`, as unicode."""
        return safe_unicode(os.path.basename(self.path))

    @LazyProperty
    def description(self):
        raise NotImplementedError

    def refs(self):
        """
        returns a `dict` with branches, bookmarks, tags, and closed_branches
        for this repository
        """
        return dict(
            branches=self.branches,
            branches_closed=self.branches_closed,
            tags=self.tags,
            bookmarks=self.bookmarks
        )

    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tags names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns combined size in bytes for all repository files
        """
        tip = self.get_commit()
        return tip.size

    def size_at_commit(self, commit_id):
        """Combined size in bytes of all files at the given commit."""
        commit = self.get_commit(commit_id)
        return commit.size

    def is_empty(self):
        """``True`` when the repository has no commits."""
        return not bool(self.commit_ids)

    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link.
        """
        raise NotImplementedError

    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError

    # ==========================================================================
    # COMMITS
    # ==========================================================================

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError

    def __iter__(self):
        # iterate all commits in ascending commit_ids order
        for commit_id in self.commit_ids:
            yield self.get_commit(commit_id=commit_id)

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date:
        :param end_date:
        :param branch_name:
        :param show_hidden:
        :param pre_load:
        :param translate_tags:
        """
        raise NotImplementedError

    def __getitem__(self, key):
        """
        Allows index based access to the commit objects of this repository.
        """
        pre_load = ["author", "branch", "date", "message", "parents"]
        if isinstance(key, slice):
            return self._get_range(key, pre_load)
        return self.get_commit(commit_idx=key, pre_load=pre_load)

    def _get_range(self, slice_obj, pre_load):
        # generator backing slice access in __getitem__
        for commit_id in self.commit_ids.__getitem__(slice_obj):
            yield self.get_commit(commit_id=commit_id, pre_load=pre_load)

    def count(self):
        """Total number of commits."""
        return len(self.commit_ids)

    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        raise NotImplementedError

    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
          file. If `path1` is also set, this value is only associated to
          `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
          parameter works only for backends which support diff generation for
          different paths. Other backends will raise a `ValueError` if `path1`
          is set and has a different value than `path`.
        :param file_path: filter this diff by given path pattern
        """
        raise NotImplementedError

    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository
        """
        raise NotImplementedError

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit it from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit it from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
          returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False, close_branch=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tuples with the following
        fields `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields
        'possible', 'executed', 'source_commit', 'target_commit',
        'merge_commit'.

        :param repo_id: `repo_id` target repo id.
        :param workspace_id: `workspace_id` unique identifier.
        :param target_ref: `target_ref` points to the commit on top of which
            the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
            merged.
        :param source_ref: `source_ref` points to the topmost commit from
            the `source_repo` which should be merged.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
            on top of the target instead of being merged.
        :param close_branch: If `True` branch will be close before merging it
        """
        if dry_run:
            # dry runs are allowed to fall back to placeholder identity/message
            message = message or settings.MERGE_DRY_RUN_MESSAGE
            user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
            user_name = user_name or settings.MERGE_DRY_RUN_USER
        else:
            # a real merge must carry a proper author identity and message
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        try:
            return self._merge_repo(
                repo_id, workspace_id, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase, close_branch=close_branch)
        except RepositoryError as exc:
            # never propagate backend failures; report them as an UNKNOWN
            # merge failure so callers get a well-formed MergeResponse
            log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN,
                metadata={'exception': str(exc)})

    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """Internal implementation of merge."""
        raise NotImplementedError

    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Create the merge workspace.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError

    def _get_legacy_shadow_repository_path(self, workspace_id):
        """
        Legacy version that was used before. We still need it for
        backward compat
        """
        return os.path.join(
            os.path.dirname(self.path),
            '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))

    def _get_shadow_repository_path(self, repo_id, workspace_id):
        # The name of the shadow repository must start with '.', so it is
        # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
        legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
        if os.path.exists(legacy_repository_path):
            return legacy_repository_path
        else:
            return os.path.join(
                os.path.dirname(self.path),
                '.__shadow_repo_%s_%s' % (repo_id, workspace_id))

    def cleanup_merge_workspace(self, repo_id, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated to
        the given `workspace_id`.

        :param workspace_id: `workspace_id` unique identifier.
        """
        shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
        shadow_repository_path_del = '{}.{}.delete'.format(
            shadow_repository_path, time.time())

        # move the shadow repo, so it never conflicts with the one used.
        # we use this method because shutil.rmtree had some edge case problems
        # removing symlinked repositories
        if not os.path.isdir(shadow_repository_path):
            return

        shutil.move(shadow_repository_path, shadow_repository_path_del)
        try:
            shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
        except Exception:
            # best effort: fall back to ignoring errors rather than failing
            log.exception('Failed to gracefully remove shadow repo under %s',
                          shadow_repository_path_del)
            shutil.rmtree(shadow_repository_path_del, ignore_errors=True)

    # ========== #
    # COMMIT API #
    # ========== #

    @LazyProperty
    def in_memory_commit(self):
        """
        Returns :class:`InMemoryCommit` object for this repository.
        """
        raise NotImplementedError

    # ======================== #
    # UTILITIES FOR SUBCLASSES #
    # ======================== #

    def _validate_diff_commits(self, commit1, commit2):
        """
        Validates that the given commits are related to this repository.

        Intended as a utility for sub classes to have a consistent validation
        of input parameters in methods like :meth:`get_diff`.
        """
        self._validate_commit(commit1)
        self._validate_commit(commit2)
        if (isinstance(commit1, EmptyCommit) and
                isinstance(commit2, EmptyCommit)):
            raise ValueError("Cannot compare two empty commits")

    def _validate_commit(self, commit):
        """Raise unless ``commit`` is a BaseCommit belonging to this repository."""
        if not isinstance(commit, BaseCommit):
            raise TypeError(
                "%s is not of type BaseCommit" % repr(commit))
        if commit.repository != self and not isinstance(commit, EmptyCommit):
            raise ValueError(
                "Commit %s must be a valid commit from this repository %s, "
                "related to this repository instead %s." %
                (commit, self, commit.repository))

    def _validate_commit_id(self, commit_id):
        if not isinstance(commit_id, compat.string_types):
            raise TypeError("commit_id must be a string value")

    def _validate_commit_idx(self, commit_idx):
        # `long` is Python 2 only; indices may exceed the int range there
        if not isinstance(commit_idx, (int, long)):
            raise TypeError("commit_idx must be a numeric value")

    def _validate_branch_name(self, branch_name):
        if branch_name and branch_name not in self.branches_all:
            msg = ("Branch %s not found in %s" % (branch_name, self))
            raise BranchDoesNotExistError(msg)

    #
    # Supporting deprecated API parts
    # TODO: johbo: consider to move this into a mixin
    #

    @property
    def EMPTY_CHANGESET(self):
        warnings.warn(
            "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
        return self.EMPTY_COMMIT_ID

    @property
    def revisions(self):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        return self.commit_ids

    @revisions.setter
    def revisions(self, value):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        self.commit_ids = value

    def get_changeset(self, revision=None, pre_load=None):
        warnings.warn("Use get_commit instead", DeprecationWarning)
        commit_id = None
        commit_idx = None
        # strings are commit ids, anything else is treated as an index
        if isinstance(revision, compat.string_types):
            commit_id = revision
        else:
            commit_idx = revision
        return self.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)

    def get_changesets(
            self, start=None, end=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        warnings.warn("Use get_commits instead", DeprecationWarning)
        start_id = self._revision_to_commit(start)
        end_id = self._revision_to_commit(end)
        return self.get_commits(
            start_id=start_id, end_id=end_id, start_date=start_date,
            end_date=end_date, branch_name=branch_name, pre_load=pre_load)

    def _revision_to_commit(self, revision):
        """
        Translates a revision to a commit_id

        Helps to support the old changeset based API which allows to use
        commit ids and commit indices interchangeable.
        """
        if revision is None:
            return revision

        if isinstance(revision, compat.string_types):
            commit_id = revision
        else:
            commit_id = self.commit_ids[revision]
        return commit_id

    @property
    def in_memory_changeset(self):
        warnings.warn("Use in_memory_commit instead", DeprecationWarning)
        return self.in_memory_commit

    def get_path_permissions(self, username):
        """
        Returns a path permission checker or None if not supported

        :param username: session user name
        :return: an instance of BasePathPermissionChecker or None
        """
        return None

    def install_hooks(self, force=False):
        # delegates hook installation to the vcsserver remote
        return self._remote.install_hooks(force)

    def get_hooks_info(self):
        return self._remote.get_hooks_info()
787 790
788 791
789 792 class BaseCommit(object):
790 793 """
791 794 Each backend should implement it's commit representation.
792 795
793 796 **Attributes**
794 797
795 798 ``repository``
796 799 repository object within which commit exists
797 800
798 801 ``id``
799 802 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
800 803 just ``tip``.
801 804
802 805 ``raw_id``
803 806 raw commit representation (i.e. full 40 length sha for git
804 807 backend)
805 808
806 809 ``short_id``
807 810 shortened (if apply) version of ``raw_id``; it would be simple
808 811 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
809 812 as ``raw_id`` for subversion
810 813
811 814 ``idx``
812 815 commit index
813 816
814 817 ``files``
815 818 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
816 819
817 820 ``dirs``
818 821 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
819 822
820 823 ``nodes``
821 824 combined list of ``Node`` objects
822 825
823 826 ``author``
824 827 author of the commit, as unicode
825 828
826 829 ``message``
827 830 message of the commit, as unicode
828 831
829 832 ``parents``
830 833 list of parent commits
831 834
832 835 """
833 836
834 837 branch = None
835 838 """
836 839 Depending on the backend this should be set to the branch name of the
837 840 commit. Backends not supporting branches on commits should leave this
838 841 value as ``None``.
839 842 """
840 843
841 844 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
842 845 """
843 846 This template is used to generate a default prefix for repository archives
844 847 if no prefix has been specified.
845 848 """
846 849
847 850 def __str__(self):
848 851 return '<%s at %s:%s>' % (
849 852 self.__class__.__name__, self.idx, self.short_id)
850 853
851 854 def __repr__(self):
852 855 return self.__str__()
853 856
854 857 def __unicode__(self):
855 858 return u'%s:%s' % (self.idx, self.short_id)
856 859
857 860 def __eq__(self, other):
858 861 same_instance = isinstance(other, self.__class__)
859 862 return same_instance and self.raw_id == other.raw_id
860 863
861 864 def __json__(self):
862 865 parents = []
863 866 try:
864 867 for parent in self.parents:
865 868 parents.append({'raw_id': parent.raw_id})
866 869 except NotImplementedError:
867 870 # empty commit doesn't have parents implemented
868 871 pass
869 872
870 873 return {
871 874 'short_id': self.short_id,
872 875 'raw_id': self.raw_id,
873 876 'revision': self.idx,
874 877 'message': self.message,
875 878 'date': self.date,
876 879 'author': self.author,
877 880 'parents': parents,
878 881 'branch': self.branch
879 882 }
880 883
881 884 def __getstate__(self):
882 885 d = self.__dict__.copy()
883 886 d.pop('_remote', None)
884 887 d.pop('repository', None)
885 888 return d
886 889
887 890 def _get_refs(self):
888 891 return {
889 892 'branches': [self.branch] if self.branch else [],
890 893 'bookmarks': getattr(self, 'bookmarks', []),
891 894 'tags': self.tags
892 895 }
893 896
894 897 @LazyProperty
895 898 def last(self):
896 899 """
897 900 ``True`` if this is last commit in repository, ``False``
898 901 otherwise; trying to access this attribute while there is no
899 902 commits would raise `EmptyRepositoryError`
900 903 """
901 904 if self.repository is None:
902 905 raise CommitError("Cannot check if it's most recent commit")
903 906 return self.raw_id == self.repository.commit_ids[-1]
904 907
905 908 @LazyProperty
906 909 def parents(self):
907 910 """
908 911 Returns list of parent commits.
909 912 """
910 913 raise NotImplementedError
911 914
912 915 @LazyProperty
913 916 def first_parent(self):
914 917 """
915 918 Returns list of parent commits.
916 919 """
917 920 return self.parents[0] if self.parents else EmptyCommit()
918 921
919 922 @property
920 923 def merge(self):
921 924 """
922 925 Returns boolean if commit is a merge.
923 926 """
924 927 return len(self.parents) > 1
925 928
926 929 @LazyProperty
927 930 def children(self):
928 931 """
929 932 Returns list of child commits.
930 933 """
931 934 raise NotImplementedError
932 935
933 936 @LazyProperty
934 937 def id(self):
935 938 """
936 939 Returns string identifying this commit.
937 940 """
938 941 raise NotImplementedError
939 942
940 943 @LazyProperty
941 944 def raw_id(self):
942 945 """
943 946 Returns raw string identifying this commit.
944 947 """
945 948 raise NotImplementedError
946 949
947 950 @LazyProperty
948 951 def short_id(self):
949 952 """
950 953 Returns shortened version of ``raw_id`` attribute, as string,
951 954 identifying this commit, useful for presentation to users.
952 955 """
953 956 raise NotImplementedError
954 957
955 958 @LazyProperty
956 959 def idx(self):
957 960 """
958 961 Returns integer identifying this commit.
959 962 """
960 963 raise NotImplementedError
961 964
962 965 @LazyProperty
963 966 def committer(self):
964 967 """
965 968 Returns committer for this commit
966 969 """
967 970 raise NotImplementedError
968 971
969 972 @LazyProperty
970 973 def committer_name(self):
971 974 """
972 975 Returns committer name for this commit
973 976 """
974 977
975 978 return author_name(self.committer)
976 979
977 980 @LazyProperty
978 981 def committer_email(self):
979 982 """
980 983 Returns committer email address for this commit
981 984 """
982 985
983 986 return author_email(self.committer)
984 987
985 988 @LazyProperty
986 989 def author(self):
987 990 """
988 991 Returns author for this commit
989 992 """
990 993
991 994 raise NotImplementedError
992 995
993 996 @LazyProperty
994 997 def author_name(self):
995 998 """
996 999 Returns author name for this commit
997 1000 """
998 1001
999 1002 return author_name(self.author)
1000 1003
1001 1004 @LazyProperty
1002 1005 def author_email(self):
1003 1006 """
1004 1007 Returns author email address for this commit
1005 1008 """
1006 1009
1007 1010 return author_email(self.author)
1008 1011
1009 1012 def get_file_mode(self, path):
1010 1013 """
1011 1014 Returns stat mode of the file at `path`.
1012 1015 """
1013 1016 raise NotImplementedError
1014 1017
1015 1018 def is_link(self, path):
1016 1019 """
1017 1020 Returns ``True`` if given `path` is a symlink
1018 1021 """
1019 1022 raise NotImplementedError
1020 1023
1021 1024 def get_file_content(self, path):
1022 1025 """
1023 1026 Returns content of the file at the given `path`.
1024 1027 """
1025 1028 raise NotImplementedError
1026 1029
1027 1030 def get_file_size(self, path):
1028 1031 """
1029 1032 Returns size of the file at the given `path`.
1030 1033 """
1031 1034 raise NotImplementedError
1032 1035
1033 1036 def get_path_commit(self, path, pre_load=None):
1034 1037 """
1035 1038 Returns last commit of the file at the given `path`.
1036 1039
1037 1040 :param pre_load: Optional. List of commit attributes to load.
1038 1041 """
1039 1042 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1040 1043 if not commits:
1041 1044 raise RepositoryError(
1042 1045 'Failed to fetch history for path {}. '
1043 1046 'Please check if such path exists in your repository'.format(
1044 1047 path))
1045 1048 return commits[0]
1046 1049
1047 1050 def get_path_history(self, path, limit=None, pre_load=None):
1048 1051 """
1049 1052 Returns history of file as reversed list of :class:`BaseCommit`
1050 1053 objects for which file at given `path` has been modified.
1051 1054
1052 1055 :param limit: Optional. Allows to limit the size of the returned
1053 1056 history. This is intended as a hint to the underlying backend, so
1054 1057 that it can apply optimizations depending on the limit.
1055 1058 :param pre_load: Optional. List of commit attributes to load.
1056 1059 """
1057 1060 raise NotImplementedError
1058 1061
1059 1062 def get_file_annotate(self, path, pre_load=None):
1060 1063 """
1061 1064 Returns a generator of four element tuples with
1062 1065 lineno, sha, commit lazy loader and line
1063 1066
1064 1067 :param pre_load: Optional. List of commit attributes to load.
1065 1068 """
1066 1069 raise NotImplementedError
1067 1070
1068 1071 def get_nodes(self, path):
1069 1072 """
1070 1073 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1071 1074 state of commit at the given ``path``.
1072 1075
1073 1076 :raises ``CommitError``: if node at the given ``path`` is not
1074 1077 instance of ``DirNode``
1075 1078 """
1076 1079 raise NotImplementedError
1077 1080
1078 1081 def get_node(self, path):
1079 1082 """
1080 1083 Returns ``Node`` object from the given ``path``.
1081 1084
1082 1085 :raises ``NodeDoesNotExistError``: if there is no node at the given
1083 1086 ``path``
1084 1087 """
1085 1088 raise NotImplementedError
1086 1089
1087 1090 def get_largefile_node(self, path):
1088 1091 """
1089 1092 Returns the path to largefile from Mercurial/Git-lfs storage.
1090 1093 or None if it's not a largefile node
1091 1094 """
1092 1095 return None
1093 1096
1094 1097 def archive_repo(self, file_path, kind='tgz', subrepos=None,
1095 1098 prefix=None, write_metadata=False, mtime=None):
1096 1099 """
1097 1100 Creates an archive containing the contents of the repository.
1098 1101
1099 1102 :param file_path: path to the file which to create the archive.
1100 1103 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1101 1104 :param prefix: name of root directory in archive.
1102 1105 Default is repository name and commit's short_id joined with dash:
1103 1106 ``"{repo_name}-{short_id}"``.
1104 1107 :param write_metadata: write a metadata file into archive.
1105 1108 :param mtime: custom modification time for archive creation, defaults
1106 1109 to time.time() if not given.
1107 1110
1108 1111 :raise VCSError: If prefix has a problem.
1109 1112 """
1110 1113 allowed_kinds = settings.ARCHIVE_SPECS.keys()
1111 1114 if kind not in allowed_kinds:
1112 1115 raise ImproperArchiveTypeError(
1113 1116 'Archive kind (%s) not supported use one of %s' %
1114 1117 (kind, allowed_kinds))
1115 1118
1116 1119 prefix = self._validate_archive_prefix(prefix)
1117 1120
1118 1121 mtime = mtime or time.mktime(self.date.timetuple())
1119 1122
1120 1123 file_info = []
1121 1124 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1122 1125 for _r, _d, files in cur_rev.walk('/'):
1123 1126 for f in files:
1124 1127 f_path = os.path.join(prefix, f.path)
1125 1128 file_info.append(
1126 1129 (f_path, f.mode, f.is_link(), f.raw_bytes))
1127 1130
1128 1131 if write_metadata:
1129 1132 metadata = [
1130 1133 ('repo_name', self.repository.name),
1131 1134 ('rev', self.raw_id),
1132 1135 ('create_time', mtime),
1133 1136 ('branch', self.branch),
1134 1137 ('tags', ','.join(self.tags)),
1135 1138 ]
1136 1139 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1137 1140 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1138 1141
1139 1142 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1140 1143
1141 1144 def _validate_archive_prefix(self, prefix):
1142 1145 if prefix is None:
1143 1146 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1144 1147 repo_name=safe_str(self.repository.name),
1145 1148 short_id=self.short_id)
1146 1149 elif not isinstance(prefix, str):
1147 1150 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1148 1151 elif prefix.startswith('/'):
1149 1152 raise VCSError("Prefix cannot start with leading slash")
1150 1153 elif prefix.strip() == '':
1151 1154 raise VCSError("Prefix cannot be empty")
1152 1155 return prefix
1153 1156
1154 1157 @LazyProperty
1155 1158 def root(self):
1156 1159 """
1157 1160 Returns ``RootNode`` object for this commit.
1158 1161 """
1159 1162 return self.get_node('')
1160 1163
1161 1164 def next(self, branch=None):
1162 1165 """
1163 1166 Returns next commit from current, if branch is gives it will return
1164 1167 next commit belonging to this branch
1165 1168
1166 1169 :param branch: show commits within the given named branch
1167 1170 """
1168 1171 indexes = xrange(self.idx + 1, self.repository.count())
1169 1172 return self._find_next(indexes, branch)
1170 1173
1171 1174 def prev(self, branch=None):
1172 1175 """
1173 1176 Returns previous commit from current, if branch is gives it will
1174 1177 return previous commit belonging to this branch
1175 1178
1176 1179 :param branch: show commit within the given named branch
1177 1180 """
1178 1181 indexes = xrange(self.idx - 1, -1, -1)
1179 1182 return self._find_next(indexes, branch)
1180 1183
1181 1184 def _find_next(self, indexes, branch=None):
1182 1185 if branch and self.branch != branch:
1183 1186 raise VCSError('Branch option used on commit not belonging '
1184 1187 'to that branch')
1185 1188
1186 1189 for next_idx in indexes:
1187 1190 commit = self.repository.get_commit(commit_idx=next_idx)
1188 1191 if branch and branch != commit.branch:
1189 1192 continue
1190 1193 return commit
1191 1194 raise CommitDoesNotExistError
1192 1195
1193 1196 def diff(self, ignore_whitespace=True, context=3):
1194 1197 """
1195 1198 Returns a `Diff` object representing the change made by this commit.
1196 1199 """
1197 1200 parent = self.first_parent
1198 1201 diff = self.repository.get_diff(
1199 1202 parent, self,
1200 1203 ignore_whitespace=ignore_whitespace,
1201 1204 context=context)
1202 1205 return diff
1203 1206
1204 1207 @LazyProperty
1205 1208 def added(self):
1206 1209 """
1207 1210 Returns list of added ``FileNode`` objects.
1208 1211 """
1209 1212 raise NotImplementedError
1210 1213
1211 1214 @LazyProperty
1212 1215 def changed(self):
1213 1216 """
1214 1217 Returns list of modified ``FileNode`` objects.
1215 1218 """
1216 1219 raise NotImplementedError
1217 1220
1218 1221 @LazyProperty
1219 1222 def removed(self):
1220 1223 """
1221 1224 Returns list of removed ``FileNode`` objects.
1222 1225 """
1223 1226 raise NotImplementedError
1224 1227
1225 1228 @LazyProperty
1226 1229 def size(self):
1227 1230 """
1228 1231 Returns total number of bytes from contents of all filenodes.
1229 1232 """
1230 1233 return sum((node.size for node in self.get_filenodes_generator()))
1231 1234
1232 1235 def walk(self, topurl=''):
1233 1236 """
1234 1237 Similar to os.walk method. Insted of filesystem it walks through
1235 1238 commit starting at given ``topurl``. Returns generator of tuples
1236 1239 (topnode, dirnodes, filenodes).
1237 1240 """
1238 1241 topnode = self.get_node(topurl)
1239 1242 if not topnode.is_dir():
1240 1243 return
1241 1244 yield (topnode, topnode.dirs, topnode.files)
1242 1245 for dirnode in topnode.dirs:
1243 1246 for tup in self.walk(dirnode.path):
1244 1247 yield tup
1245 1248
1246 1249 def get_filenodes_generator(self):
1247 1250 """
1248 1251 Returns generator that yields *all* file nodes.
1249 1252 """
1250 1253 for topnode, dirs, files in self.walk():
1251 1254 for node in files:
1252 1255 yield node
1253 1256
1254 1257 #
1255 1258 # Utilities for sub classes to support consistent behavior
1256 1259 #
1257 1260
1258 1261 def no_node_at_path(self, path):
1259 1262 return NodeDoesNotExistError(
1260 1263 u"There is no file nor directory at the given path: "
1261 1264 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1262 1265
1263 1266 def _fix_path(self, path):
1264 1267 """
1265 1268 Paths are stored without trailing slash so we need to get rid off it if
1266 1269 needed.
1267 1270 """
1268 1271 return path.rstrip('/')
1269 1272
1270 1273 #
1271 1274 # Deprecated API based on changesets
1272 1275 #
1273 1276
1274 1277 @property
1275 1278 def revision(self):
1276 1279 warnings.warn("Use idx instead", DeprecationWarning)
1277 1280 return self.idx
1278 1281
1279 1282 @revision.setter
1280 1283 def revision(self, value):
1281 1284 warnings.warn("Use idx instead", DeprecationWarning)
1282 1285 self.idx = value
1283 1286
1284 1287 def get_file_changeset(self, path):
1285 1288 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1286 1289 return self.get_path_commit(path)
1287 1290
1288 1291
1289 1292 class BaseChangesetClass(type):
1290 1293
1291 1294 def __instancecheck__(self, instance):
1292 1295 return isinstance(instance, BaseCommit)
1293 1296
1294 1297
1295 1298 class BaseChangeset(BaseCommit):
1296 1299
1297 1300 __metaclass__ = BaseChangesetClass
1298 1301
1299 1302 def __new__(cls, *args, **kwargs):
1300 1303 warnings.warn(
1301 1304 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1302 1305 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1303 1306
1304 1307
1305 1308 class BaseInMemoryCommit(object):
1306 1309 """
1307 1310 Represents differences between repository's state (most recent head) and
1308 1311 changes made *in place*.
1309 1312
1310 1313 **Attributes**
1311 1314
1312 1315 ``repository``
1313 1316 repository object for this in-memory-commit
1314 1317
1315 1318 ``added``
1316 1319 list of ``FileNode`` objects marked as *added*
1317 1320
1318 1321 ``changed``
1319 1322 list of ``FileNode`` objects marked as *changed*
1320 1323
1321 1324 ``removed``
1322 1325 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1323 1326 *removed*
1324 1327
1325 1328 ``parents``
1326 1329 list of :class:`BaseCommit` instances representing parents of
1327 1330 in-memory commit. Should always be 2-element sequence.
1328 1331
1329 1332 """
1330 1333
1331 1334 def __init__(self, repository):
1332 1335 self.repository = repository
1333 1336 self.added = []
1334 1337 self.changed = []
1335 1338 self.removed = []
1336 1339 self.parents = []
1337 1340
1338 1341 def add(self, *filenodes):
1339 1342 """
1340 1343 Marks given ``FileNode`` objects as *to be committed*.
1341 1344
1342 1345 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1343 1346 latest commit
1344 1347 :raises ``NodeAlreadyAddedError``: if node with same path is already
1345 1348 marked as *added*
1346 1349 """
1347 1350 # Check if not already marked as *added* first
1348 1351 for node in filenodes:
1349 1352 if node.path in (n.path for n in self.added):
1350 1353 raise NodeAlreadyAddedError(
1351 1354 "Such FileNode %s is already marked for addition"
1352 1355 % node.path)
1353 1356 for node in filenodes:
1354 1357 self.added.append(node)
1355 1358
1356 1359 def change(self, *filenodes):
1357 1360 """
1358 1361 Marks given ``FileNode`` objects to be *changed* in next commit.
1359 1362
1360 1363 :raises ``EmptyRepositoryError``: if there are no commits yet
1361 1364 :raises ``NodeAlreadyExistsError``: if node with same path is already
1362 1365 marked to be *changed*
1363 1366 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1364 1367 marked to be *removed*
1365 1368 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1366 1369 commit
1367 1370 :raises ``NodeNotChangedError``: if node hasn't really be changed
1368 1371 """
1369 1372 for node in filenodes:
1370 1373 if node.path in (n.path for n in self.removed):
1371 1374 raise NodeAlreadyRemovedError(
1372 1375 "Node at %s is already marked as removed" % node.path)
1373 1376 try:
1374 1377 self.repository.get_commit()
1375 1378 except EmptyRepositoryError:
1376 1379 raise EmptyRepositoryError(
1377 1380 "Nothing to change - try to *add* new nodes rather than "
1378 1381 "changing them")
1379 1382 for node in filenodes:
1380 1383 if node.path in (n.path for n in self.changed):
1381 1384 raise NodeAlreadyChangedError(
1382 1385 "Node at '%s' is already marked as changed" % node.path)
1383 1386 self.changed.append(node)
1384 1387
1385 1388 def remove(self, *filenodes):
1386 1389 """
1387 1390 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1388 1391 *removed* in next commit.
1389 1392
1390 1393 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1391 1394 be *removed*
1392 1395 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1393 1396 be *changed*
1394 1397 """
1395 1398 for node in filenodes:
1396 1399 if node.path in (n.path for n in self.removed):
1397 1400 raise NodeAlreadyRemovedError(
1398 1401 "Node is already marked to for removal at %s" % node.path)
1399 1402 if node.path in (n.path for n in self.changed):
1400 1403 raise NodeAlreadyChangedError(
1401 1404 "Node is already marked to be changed at %s" % node.path)
1402 1405 # We only mark node as *removed* - real removal is done by
1403 1406 # commit method
1404 1407 self.removed.append(node)
1405 1408
1406 1409 def reset(self):
1407 1410 """
1408 1411 Resets this instance to initial state (cleans ``added``, ``changed``
1409 1412 and ``removed`` lists).
1410 1413 """
1411 1414 self.added = []
1412 1415 self.changed = []
1413 1416 self.removed = []
1414 1417 self.parents = []
1415 1418
1416 1419 def get_ipaths(self):
1417 1420 """
1418 1421 Returns generator of paths from nodes marked as added, changed or
1419 1422 removed.
1420 1423 """
1421 1424 for node in itertools.chain(self.added, self.changed, self.removed):
1422 1425 yield node.path
1423 1426
1424 1427 def get_paths(self):
1425 1428 """
1426 1429 Returns list of paths from nodes marked as added, changed or removed.
1427 1430 """
1428 1431 return list(self.get_ipaths())
1429 1432
1430 1433 def check_integrity(self, parents=None):
1431 1434 """
1432 1435 Checks in-memory commit's integrity. Also, sets parents if not
1433 1436 already set.
1434 1437
1435 1438 :raises CommitError: if any error occurs (i.e.
1436 1439 ``NodeDoesNotExistError``).
1437 1440 """
1438 1441 if not self.parents:
1439 1442 parents = parents or []
1440 1443 if len(parents) == 0:
1441 1444 try:
1442 1445 parents = [self.repository.get_commit(), None]
1443 1446 except EmptyRepositoryError:
1444 1447 parents = [None, None]
1445 1448 elif len(parents) == 1:
1446 1449 parents += [None]
1447 1450 self.parents = parents
1448 1451
1449 1452 # Local parents, only if not None
1450 1453 parents = [p for p in self.parents if p]
1451 1454
1452 1455 # Check nodes marked as added
1453 1456 for p in parents:
1454 1457 for node in self.added:
1455 1458 try:
1456 1459 p.get_node(node.path)
1457 1460 except NodeDoesNotExistError:
1458 1461 pass
1459 1462 else:
1460 1463 raise NodeAlreadyExistsError(
1461 1464 "Node `%s` already exists at %s" % (node.path, p))
1462 1465
1463 1466 # Check nodes marked as changed
1464 1467 missing = set(self.changed)
1465 1468 not_changed = set(self.changed)
1466 1469 if self.changed and not parents:
1467 1470 raise NodeDoesNotExistError(str(self.changed[0].path))
1468 1471 for p in parents:
1469 1472 for node in self.changed:
1470 1473 try:
1471 1474 old = p.get_node(node.path)
1472 1475 missing.remove(node)
1473 1476 # if content actually changed, remove node from not_changed
1474 1477 if old.content != node.content:
1475 1478 not_changed.remove(node)
1476 1479 except NodeDoesNotExistError:
1477 1480 pass
1478 1481 if self.changed and missing:
1479 1482 raise NodeDoesNotExistError(
1480 1483 "Node `%s` marked as modified but missing in parents: %s"
1481 1484 % (node.path, parents))
1482 1485
1483 1486 if self.changed and not_changed:
1484 1487 raise NodeNotChangedError(
1485 1488 "Node `%s` wasn't actually changed (parents: %s)"
1486 1489 % (not_changed.pop().path, parents))
1487 1490
1488 1491 # Check nodes marked as removed
1489 1492 if self.removed and not parents:
1490 1493 raise NodeDoesNotExistError(
1491 1494 "Cannot remove node at %s as there "
1492 1495 "were no parents specified" % self.removed[0].path)
1493 1496 really_removed = set()
1494 1497 for p in parents:
1495 1498 for node in self.removed:
1496 1499 try:
1497 1500 p.get_node(node.path)
1498 1501 really_removed.add(node)
1499 1502 except CommitError:
1500 1503 pass
1501 1504 not_removed = set(self.removed) - really_removed
1502 1505 if not_removed:
1503 1506 # TODO: johbo: This code branch does not seem to be covered
1504 1507 raise NodeDoesNotExistError(
1505 1508 "Cannot remove node at %s from "
1506 1509 "following parents: %s" % (not_removed, parents))
1507 1510
1508 1511 def commit(
1509 1512 self, message, author, parents=None, branch=None, date=None,
1510 1513 **kwargs):
1511 1514 """
1512 1515 Performs in-memory commit (doesn't check workdir in any way) and
1513 1516 returns newly created :class:`BaseCommit`. Updates repository's
1514 1517 attribute `commits`.
1515 1518
1516 1519 .. note::
1517 1520
1518 1521 While overriding this method each backend's should call
1519 1522 ``self.check_integrity(parents)`` in the first place.
1520 1523
1521 1524 :param message: message of the commit
1522 1525 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1523 1526 :param parents: single parent or sequence of parents from which commit
1524 1527 would be derived
1525 1528 :param date: ``datetime.datetime`` instance. Defaults to
1526 1529 ``datetime.datetime.now()``.
1527 1530 :param branch: branch name, as string. If none given, default backend's
1528 1531 branch would be used.
1529 1532
1530 1533 :raises ``CommitError``: if any error occurs while committing
1531 1534 """
1532 1535 raise NotImplementedError
1533 1536
1534 1537
1535 1538 class BaseInMemoryChangesetClass(type):
1536 1539
1537 1540 def __instancecheck__(self, instance):
1538 1541 return isinstance(instance, BaseInMemoryCommit)
1539 1542
1540 1543
1541 1544 class BaseInMemoryChangeset(BaseInMemoryCommit):
1542 1545
1543 1546 __metaclass__ = BaseInMemoryChangesetClass
1544 1547
1545 1548 def __new__(cls, *args, **kwargs):
1546 1549 warnings.warn(
1547 1550 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1548 1551 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1549 1552
1550 1553
1551 1554 class EmptyCommit(BaseCommit):
1552 1555 """
1553 1556 An dummy empty commit. It's possible to pass hash when creating
1554 1557 an EmptyCommit
1555 1558 """
1556 1559
1557 1560 def __init__(
1558 1561 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1559 1562 message='', author='', date=None):
1560 1563 self._empty_commit_id = commit_id
1561 1564 # TODO: johbo: Solve idx parameter, default value does not make
1562 1565 # too much sense
1563 1566 self.idx = idx
1564 1567 self.message = message
1565 1568 self.author = author
1566 1569 self.date = date or datetime.datetime.fromtimestamp(0)
1567 1570 self.repository = repo
1568 1571 self.alias = alias
1569 1572
1570 1573 @LazyProperty
1571 1574 def raw_id(self):
1572 1575 """
1573 1576 Returns raw string identifying this commit, useful for web
1574 1577 representation.
1575 1578 """
1576 1579
1577 1580 return self._empty_commit_id
1578 1581
1579 1582 @LazyProperty
1580 1583 def branch(self):
1581 1584 if self.alias:
1582 1585 from rhodecode.lib.vcs.backends import get_backend
1583 1586 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1584 1587
1585 1588 @LazyProperty
1586 1589 def short_id(self):
1587 1590 return self.raw_id[:12]
1588 1591
1589 1592 @LazyProperty
1590 1593 def id(self):
1591 1594 return self.raw_id
1592 1595
1593 1596 def get_path_commit(self, path):
1594 1597 return self
1595 1598
1596 1599 def get_file_content(self, path):
1597 1600 return u''
1598 1601
1599 1602 def get_file_size(self, path):
1600 1603 return 0
1601 1604
1602 1605
1603 1606 class EmptyChangesetClass(type):
1604 1607
1605 1608 def __instancecheck__(self, instance):
1606 1609 return isinstance(instance, EmptyCommit)
1607 1610
1608 1611
1609 1612 class EmptyChangeset(EmptyCommit):
1610 1613
1611 1614 __metaclass__ = EmptyChangesetClass
1612 1615
1613 1616 def __new__(cls, *args, **kwargs):
1614 1617 warnings.warn(
1615 1618 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1616 1619 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1617 1620
1618 1621 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1619 1622 alias=None, revision=-1, message='', author='', date=None):
1620 1623 if requested_revision is not None:
1621 1624 warnings.warn(
1622 1625 "Parameter requested_revision not supported anymore",
1623 1626 DeprecationWarning)
1624 1627 super(EmptyChangeset, self).__init__(
1625 1628 commit_id=cs, repo=repo, alias=alias, idx=revision,
1626 1629 message=message, author=author, date=date)
1627 1630
1628 1631 @property
1629 1632 def revision(self):
1630 1633 warnings.warn("Use idx instead", DeprecationWarning)
1631 1634 return self.idx
1632 1635
1633 1636 @revision.setter
1634 1637 def revision(self, value):
1635 1638 warnings.warn("Use idx instead", DeprecationWarning)
1636 1639 self.idx = value
1637 1640
1638 1641
1639 1642 class EmptyRepository(BaseRepository):
1640 1643 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1641 1644 pass
1642 1645
1643 1646 def get_diff(self, *args, **kwargs):
1644 1647 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1645 1648 return GitDiff('')
1646 1649
1647 1650
1648 1651 class CollectionGenerator(object):
1649 1652
1650 1653 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1651 1654 self.repo = repo
1652 1655 self.commit_ids = commit_ids
1653 1656 # TODO: (oliver) this isn't currently hooked up
1654 1657 self.collection_size = None
1655 1658 self.pre_load = pre_load
1656 1659 self.translate_tag = translate_tag
1657 1660
1658 1661 def __len__(self):
1659 1662 if self.collection_size is not None:
1660 1663 return self.collection_size
1661 1664 return self.commit_ids.__len__()
1662 1665
1663 1666 def __iter__(self):
1664 1667 for commit_id in self.commit_ids:
1665 1668 # TODO: johbo: Mercurial passes in commit indices or commit ids
1666 1669 yield self._commit_factory(commit_id)
1667 1670
1668 1671 def _commit_factory(self, commit_id):
1669 1672 """
1670 1673 Allows backends to override the way commits are generated.
1671 1674 """
1672 1675 return self.repo.get_commit(
1673 1676 commit_id=commit_id, pre_load=self.pre_load,
1674 1677 translate_tag=self.translate_tag)
1675 1678
1676 1679 def __getslice__(self, i, j):
1677 1680 """
1678 1681 Returns an iterator of sliced repository
1679 1682 """
1680 1683 commit_ids = self.commit_ids[i:j]
1681 1684 return self.__class__(
1682 1685 self.repo, commit_ids, pre_load=self.pre_load,
1683 1686 translate_tag=self.translate_tag)
1684 1687
1685 1688 def __repr__(self):
1686 1689 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1687 1690
1688 1691
1689 1692 class Config(object):
1690 1693 """
1691 1694 Represents the configuration for a repository.
1692 1695
1693 1696 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1694 1697 standard library. It implements only the needed subset.
1695 1698 """
1696 1699
1697 1700 def __init__(self):
1698 1701 self._values = {}
1699 1702
1700 1703 def copy(self):
1701 1704 clone = Config()
1702 1705 for section, values in self._values.items():
1703 1706 clone._values[section] = values.copy()
1704 1707 return clone
1705 1708
1706 1709 def __repr__(self):
1707 1710 return '<Config(%s sections) at %s>' % (
1708 1711 len(self._values), hex(id(self)))
1709 1712
1710 1713 def items(self, section):
1711 1714 return self._values.get(section, {}).iteritems()
1712 1715
1713 1716 def get(self, section, option):
1714 1717 return self._values.get(section, {}).get(option)
1715 1718
1716 1719 def set(self, section, option, value):
1717 1720 section_values = self._values.setdefault(section, {})
1718 1721 section_values[option] = value
1719 1722
1720 1723 def clear_section(self, section):
1721 1724 self._values[section] = {}
1722 1725
1723 1726 def serialize(self):
1724 1727 """
1725 1728 Creates a list of three tuples (section, key, value) representing
1726 1729 this config object.
1727 1730 """
1728 1731 items = []
1729 1732 for section in self._values:
1730 1733 for option, value in self._values[section].items():
1731 1734 items.append(
1732 1735 (safe_str(section), safe_str(option), safe_str(value)))
1733 1736 return items
1734 1737
1735 1738
1736 1739 class Diff(object):
1737 1740 """
1738 1741 Represents a diff result from a repository backend.
1739 1742
1740 1743 Subclasses have to provide a backend specific value for
1741 1744 :attr:`_header_re` and :attr:`_meta_re`.
1742 1745 """
1743 1746 _meta_re = None
1744 1747 _header_re = None
1745 1748
1746 1749 def __init__(self, raw_diff):
1747 1750 self.raw = raw_diff
1748 1751
1749 1752 def chunks(self):
1750 1753 """
1751 1754 split the diff in chunks of separate --git a/file b/file chunks
1752 1755 to make diffs consistent we must prepend with \n, and make sure
1753 1756 we can detect last chunk as this was also has special rule
1754 1757 """
1755 1758
1756 1759 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1757 1760 header = diff_parts[0]
1758 1761
1759 1762 if self._meta_re:
1760 1763 match = self._meta_re.match(header)
1761 1764
1762 1765 chunks = diff_parts[1:]
1763 1766 total_chunks = len(chunks)
1764 1767
1765 1768 return (
1766 1769 DiffChunk(chunk, self, cur_chunk == total_chunks)
1767 1770 for cur_chunk, chunk in enumerate(chunks, start=1))
1768 1771
1769 1772
1770 1773 class DiffChunk(object):
1771 1774
1772 1775 def __init__(self, chunk, diff, last_chunk):
1773 1776 self._diff = diff
1774 1777
1775 1778 # since we split by \ndiff --git that part is lost from original diff
1776 1779 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1777 1780 if not last_chunk:
1778 1781 chunk += '\n'
1779 1782
1780 1783 match = self._diff._header_re.match(chunk)
1781 1784 self.header = match.groupdict()
1782 1785 self.diff = chunk[match.end():]
1783 1786 self.raw = chunk
1784 1787
1785 1788
1786 1789 class BasePathPermissionChecker(object):
1787 1790
1788 1791 @staticmethod
1789 1792 def create_from_patterns(includes, excludes):
1790 1793 if includes and '*' in includes and not excludes:
1791 1794 return AllPathPermissionChecker()
1792 1795 elif excludes and '*' in excludes:
1793 1796 return NonePathPermissionChecker()
1794 1797 else:
1795 1798 return PatternPathPermissionChecker(includes, excludes)
1796 1799
1797 1800 @property
1798 1801 def has_full_access(self):
1799 1802 raise NotImplemented()
1800 1803
1801 1804 def has_access(self, path):
1802 1805 raise NotImplemented()
1803 1806
1804 1807
1805 1808 class AllPathPermissionChecker(BasePathPermissionChecker):
1806 1809
1807 1810 @property
1808 1811 def has_full_access(self):
1809 1812 return True
1810 1813
1811 1814 def has_access(self, path):
1812 1815 return True
1813 1816
1814 1817
1815 1818 class NonePathPermissionChecker(BasePathPermissionChecker):
1816 1819
1817 1820 @property
1818 1821 def has_full_access(self):
1819 1822 return False
1820 1823
1821 1824 def has_access(self, path):
1822 1825 return False
1823 1826
1824 1827
1825 1828 class PatternPathPermissionChecker(BasePathPermissionChecker):
1826 1829
1827 1830 def __init__(self, includes, excludes):
1828 1831 self.includes = includes
1829 1832 self.excludes = excludes
1830 1833 self.includes_re = [] if not includes else [
1831 1834 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1832 1835 self.excludes_re = [] if not excludes else [
1833 1836 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1834 1837
1835 1838 @property
1836 1839 def has_full_access(self):
1837 1840 return '*' in self.includes and not self.excludes
1838 1841
1839 1842 def has_access(self, path):
1840 1843 for regex in self.excludes_re:
1841 1844 if regex.match(path):
1842 1845 return False
1843 1846 for regex in self.includes_re:
1844 1847 if regex.match(path):
1845 1848 return True
1846 1849 return False
@@ -1,932 +1,937 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, exceptions
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 45 from rhodecode.lib.vcs.compat import configparser
46 46
47 47 hexlify = binascii.hexlify
48 48 nullid = "\0" * 20
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class MercurialRepository(BaseRepository):
54 54 """
55 55 Mercurial repository backend
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'default'
58 58
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if a repository could not be found at the
        given ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
           it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
           making a clone
        :param with_wire: optional options passed through to the vcsserver
           remote connection
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        self.with_wire = with_wire

        self._init_repo(create, src_url, do_workspace_checkout)

        # caches
        self._commit_ids = {}  # commit_id -> index; filled lazily by commit_ids
87 87
88 88 @LazyProperty
89 89 def _remote(self):
90 90 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
91 91
92 92 @LazyProperty
93 93 def commit_ids(self):
94 94 """
95 95 Returns list of commit ids, in ascending order. Being lazy
96 96 attribute allows external tools to inject shas from cache.
97 97 """
98 98 commit_ids = self._get_all_commit_ids()
99 99 self._rebuild_cache(commit_ids)
100 100 return commit_ids
101 101
102 102 def _rebuild_cache(self, commit_ids):
103 103 self._commit_ids = dict((commit_id, index)
104 104 for index, commit_id in enumerate(commit_ids))
105 105
106 106 @LazyProperty
107 107 def branches(self):
108 108 return self._get_branches()
109 109
110 110 @LazyProperty
111 111 def branches_closed(self):
112 112 return self._get_branches(active=False, closed=True)
113 113
114 114 @LazyProperty
115 115 def branches_all(self):
116 116 all_branches = {}
117 117 all_branches.update(self.branches)
118 118 all_branches.update(self.branches_closed)
119 119 return all_branches
120 120
121 121 def _get_branches(self, active=True, closed=False):
122 122 """
123 123 Gets branches for this repository
124 124 Returns only not closed active branches by default
125 125
126 126 :param active: return also active branches
127 127 :param closed: return also closed branches
128 128
129 129 """
130 130 if self.is_empty():
131 131 return {}
132 132
133 133 def get_name(ctx):
134 134 return ctx[0]
135 135
136 136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
137 137 self._remote.branches(active, closed).items()]
138 138
139 139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
140 140
141 141 @LazyProperty
142 142 def tags(self):
143 143 """
144 144 Gets tags for this repository
145 145 """
146 146 return self._get_tags()
147 147
148 148 def _get_tags(self):
149 149 if self.is_empty():
150 150 return {}
151 151
152 152 def get_name(ctx):
153 153 return ctx[0]
154 154
155 155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
156 156 self._remote.tags().items()]
157 157
158 158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
159 159
160 160 def tag(self, name, user, commit_id=None, message=None, date=None,
161 161 **kwargs):
162 162 """
163 163 Creates and returns a tag for the given ``commit_id``.
164 164
165 165 :param name: name for new tag
166 166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 167 :param commit_id: commit id for which new tag would be created
168 168 :param message: message of the tag's commit
169 169 :param date: date of tag's commit
170 170
171 171 :raises TagAlreadyExistError: if tag with same name already exists
172 172 """
173 173 if name in self.tags:
174 174 raise TagAlreadyExistError("Tag %s already exists" % name)
175 175 commit = self.get_commit(commit_id=commit_id)
176 176 local = kwargs.setdefault('local', False)
177 177
178 178 if message is None:
179 179 message = "Added tag %s for commit %s" % (name, commit.short_id)
180 180
181 181 date, tz = date_to_timestamp_plus_offset(date)
182 182
183 183 self._remote.tag(
184 184 name, commit.raw_id, message, local, user, date, tz)
185 185 self._remote.invalidate_vcs_cache()
186 186
187 187 # Reinitialize tags
188 188 self.tags = self._get_tags()
189 189 tag_id = self.tags[name]
190 190
191 191 return self.get_commit(commit_id=tag_id)
192 192
193 193 def remove_tag(self, name, user, message=None, date=None):
194 194 """
195 195 Removes tag with the given `name`.
196 196
197 197 :param name: name of the tag to be removed
198 198 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 199 :param message: message of the tag's removal commit
200 200 :param date: date of tag's removal commit
201 201
202 202 :raises TagDoesNotExistError: if tag with given name does not exists
203 203 """
204 204 if name not in self.tags:
205 205 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 206 if message is None:
207 207 message = "Removed tag %s" % name
208 208 local = False
209 209
210 210 date, tz = date_to_timestamp_plus_offset(date)
211 211
212 212 self._remote.tag(name, nullid, message, local, user, date, tz)
213 213 self._remote.invalidate_vcs_cache()
214 214 self.tags = self._get_tags()
215 215
216 216 @LazyProperty
217 217 def bookmarks(self):
218 218 """
219 219 Gets bookmarks for this repository
220 220 """
221 221 return self._get_bookmarks()
222 222
223 223 def _get_bookmarks(self):
224 224 if self.is_empty():
225 225 return {}
226 226
227 227 def get_name(ctx):
228 228 return ctx[0]
229 229
230 230 _bookmarks = [
231 231 (safe_unicode(n), hexlify(h)) for n, h in
232 232 self._remote.bookmarks().items()]
233 233
234 234 return OrderedDict(sorted(_bookmarks, key=get_name))
235 235
236 236 def _get_all_commit_ids(self):
237 237 return self._remote.get_all_commit_ids('visible')
238 238
239 239 def get_diff(
240 240 self, commit1, commit2, path='', ignore_whitespace=False,
241 241 context=3, path1=None):
242 242 """
243 243 Returns (git like) *diff*, as plain text. Shows changes introduced by
244 244 `commit2` since `commit1`.
245 245
246 246 :param commit1: Entry point from which diff is shown. Can be
247 247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
248 248 the changes since empty state of the repository until `commit2`
249 249 :param commit2: Until which commit changes should be shown.
250 250 :param ignore_whitespace: If set to ``True``, would not show whitespace
251 251 changes. Defaults to ``False``.
252 252 :param context: How many lines before/after changed lines should be
253 253 shown. Defaults to ``3``.
254 254 """
255 255 self._validate_diff_commits(commit1, commit2)
256 256 if path1 is not None and path1 != path:
257 257 raise ValueError("Diff of two different paths not supported.")
258 258
259 259 if path:
260 260 file_filter = [self.path, path]
261 261 else:
262 262 file_filter = None
263 263
264 264 diff = self._remote.diff(
265 265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
266 266 opt_git=True, opt_ignorews=ignore_whitespace,
267 267 context=context)
268 268 return MercurialDiff(diff)
269 269
270 270 def strip(self, commit_id, branch=None):
271 271 self._remote.strip(commit_id, update=False, backup="none")
272 272
273 273 self._remote.invalidate_vcs_cache()
274 274 self.commit_ids = self._get_all_commit_ids()
275 275 self._rebuild_cache(self.commit_ids)
276 276
277 277 def verify(self):
278 278 verify = self._remote.verify()
279 279
280 280 self._remote.invalidate_vcs_cache()
281 281 return verify
282 282
283 283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
284 284 if commit_id1 == commit_id2:
285 285 return commit_id1
286 286
287 287 ancestors = self._remote.revs_from_revspec(
288 288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
289 289 other_path=repo2.path)
290 290 return repo2[ancestors[0]].raw_id if ancestors else None
291 291
292 292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
293 293 if commit_id1 == commit_id2:
294 294 commits = []
295 295 else:
296 296 if merge:
297 297 indexes = self._remote.revs_from_revspec(
298 298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
299 299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
300 300 else:
301 301 indexes = self._remote.revs_from_revspec(
302 302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
303 303 commit_id1, other_path=repo2.path)
304 304
305 305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
306 306 for idx in indexes]
307 307
308 308 return commits
309 309
310 310 @staticmethod
311 311 def check_url(url, config):
312 312 """
313 313 Function will check given url and try to verify if it's a valid
314 314 link. Sometimes it may happened that mercurial will issue basic
315 315 auth request that can cause whole API to hang when used from python
316 316 or other external calls.
317 317
318 318 On failures it'll raise urllib2.HTTPError, exception is also thrown
319 319 when the return code is non 200
320 320 """
321 321 # check first if it's not an local url
322 322 if os.path.isdir(url) or url.startswith('file:'):
323 323 return True
324 324
325 325 # Request the _remote to verify the url
326 326 return connection.Hg.check_url(url, config.serialize())
327 327
328 328 @staticmethod
329 329 def is_valid_repository(path):
330 330 return os.path.isdir(os.path.join(path, '.hg'))
331 331
332 332 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
333 333 """
334 334 Function will check for mercurial repository in given path. If there
335 335 is no repository in that path it will raise an exception unless
336 336 `create` parameter is set to True - in that case repository would
337 337 be created.
338 338
339 339 If `src_url` is given, would try to clone repository from the
340 340 location at given clone_point. Additionally it'll make update to
341 341 working copy accordingly to `do_workspace_checkout` flag.
342 342 """
343 343 if create and os.path.exists(self.path):
344 344 raise RepositoryError(
345 345 "Cannot create repository at %s, location already exist"
346 346 % self.path)
347 347
348 348 if src_url:
349 349 url = str(self._get_url(src_url))
350 350 MercurialRepository.check_url(url, self.config)
351 351
352 352 self._remote.clone(url, self.path, do_workspace_checkout)
353 353
354 354 # Don't try to create if we've already cloned repo
355 355 create = False
356 356
357 357 if create:
358 358 os.makedirs(self.path, mode=0o755)
359 359
360 360 self._remote.localrepository(create)
361 361
362 362 @LazyProperty
363 363 def in_memory_commit(self):
364 364 return MercurialInMemoryCommit(self)
365 365
366 366 @LazyProperty
367 367 def description(self):
368 368 description = self._remote.get_config_value(
369 369 'web', 'description', untrusted=True)
370 370 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
371 371
372 372 @LazyProperty
373 373 def contact(self):
374 374 contact = (
375 375 self._remote.get_config_value("web", "contact") or
376 376 self._remote.get_config_value("ui", "username"))
377 377 return safe_unicode(contact or self.DEFAULT_CONTACT)
378 378
379 379 @LazyProperty
380 380 def last_change(self):
381 381 """
382 382 Returns last change made on this repository as
383 383 `datetime.datetime` object.
384 384 """
385 385 try:
386 386 return self.get_commit().date
387 387 except RepositoryError:
388 388 tzoffset = makedate()[1]
389 389 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
390 390
391 391 def _get_fs_mtime(self):
392 392 # fallback to filesystem
393 393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
394 394 st_path = os.path.join(self.path, '.hg', "store")
395 395 if os.path.exists(cl_path):
396 396 return os.stat(cl_path).st_mtime
397 397 else:
398 398 return os.stat(st_path).st_mtime
399 399
400 400 def _get_url(self, url):
401 401 """
402 402 Returns normalized url. If schema is not given, would fall
403 403 to filesystem
404 404 (``file:///``) schema.
405 405 """
406 406 url = url.encode('utf8')
407 407 if url != 'default' and '://' not in url:
408 408 url = "file:" + urllib.pathname2url(url)
409 409 return url
410 410
411 411 def get_hook_location(self):
412 412 """
413 413 returns absolute path to location where hooks are stored
414 414 """
415 415 return os.path.join(self.path, '.hg', '.hgrc')
416 416
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        :param commit_id: commit hash or symbolic name; takes precedence
            over `commit_idx`
        :param commit_idx: numeric index into ``commit_ids`` (may be negative)
        :param pre_load: optional list of commit attributes to pre-load
        :param translate_tag: unused here; kept for call compatibility with
            other backends
        :raises EmptyRepositoryError: when the repository has no commits
        :raises CommitDoesNotExistError: when no commit can be resolved
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # fast path: resolve through the locally cached id -> idx map
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                # not cached; fall through to the remote lookup below
                pass
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                id_ = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize a negative index to its positive equivalent
                    commit_idx += len(self.commit_ids)
                return MercurialCommit(
                    self, id_, commit_idx, pre_load=pre_load)
            except IndexError:
                # out-of-range index: let the remote lookup below report it
                commit_id = commit_idx
        else:
            # no selector given - default to the repository tip
            commit_id = "tip"

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            # re-raise with a message that names this repository
            msg = "Commit %s does not exist for %s" % (
                commit_id, self)
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
456 456
457 457 def get_commits(
458 458 self, start_id=None, end_id=None, start_date=None, end_date=None,
459 459 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
460 460 """
461 461 Returns generator of ``MercurialCommit`` objects from start to end
462 462 (both are inclusive)
463 463
464 464 :param start_id: None, str(commit_id)
465 465 :param end_id: None, str(commit_id)
466 466 :param start_date: if specified, commits with commit date less than
467 467 ``start_date`` would be filtered out from returned set
468 468 :param end_date: if specified, commits with commit date greater than
469 469 ``end_date`` would be filtered out from returned set
470 470 :param branch_name: if specified, commits not reachable from given
471 471 branch would be filtered out from returned set
472 472 :param show_hidden: Show hidden commits such as obsolete or hidden from
473 473 Mercurial evolve
474 474 :raise BranchDoesNotExistError: If given ``branch_name`` does not
475 475 exist.
476 476 :raise CommitDoesNotExistError: If commit for given ``start`` or
477 477 ``end`` could not be found.
478 478 """
479 479 # actually we should check now if it's not an empty repo
480 480 branch_ancestors = False
481 481 if self.is_empty():
482 482 raise EmptyRepositoryError("There are no commits yet")
483 483 self._validate_branch_name(branch_name)
484 484
485 485 if start_id is not None:
486 486 self._validate_commit_id(start_id)
487 487 c_start = self.get_commit(commit_id=start_id)
488 488 start_pos = self._commit_ids[c_start.raw_id]
489 489 else:
490 490 start_pos = None
491 491
492 492 if end_id is not None:
493 493 self._validate_commit_id(end_id)
494 494 c_end = self.get_commit(commit_id=end_id)
495 495 end_pos = max(0, self._commit_ids[c_end.raw_id])
496 496 else:
497 497 end_pos = None
498 498
499 499 if None not in [start_id, end_id] and start_pos > end_pos:
500 500 raise RepositoryError(
501 501 "Start commit '%s' cannot be after end commit '%s'" %
502 502 (start_id, end_id))
503 503
504 504 if end_pos is not None:
505 505 end_pos += 1
506 506
507 507 commit_filter = []
508 508
509 509 if branch_name and not branch_ancestors:
510 510 commit_filter.append('branch("%s")' % (branch_name,))
511 511 elif branch_name and branch_ancestors:
512 512 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
513 513
514 514 if start_date and not end_date:
515 515 commit_filter.append('date(">%s")' % (start_date,))
516 516 if end_date and not start_date:
517 517 commit_filter.append('date("<%s")' % (end_date,))
518 518 if start_date and end_date:
519 519 commit_filter.append(
520 520 'date(">%s") and date("<%s")' % (start_date, end_date))
521 521
522 522 if not show_hidden:
523 523 commit_filter.append('not obsolete()')
524 524 commit_filter.append('not hidden()')
525 525
526 526 # TODO: johbo: Figure out a simpler way for this solution
527 527 collection_generator = CollectionGenerator
528 528 if commit_filter:
529 529 commit_filter = ' and '.join(map(safe_str, commit_filter))
530 530 revisions = self._remote.rev_range([commit_filter])
531 531 collection_generator = MercurialIndexBasedCollectionGenerator
532 532 else:
533 533 revisions = self.commit_ids
534 534
535 535 if start_pos or end_pos:
536 536 revisions = revisions[start_pos:end_pos]
537 537
538 538 return collection_generator(self, revisions, pre_load=pre_load)
539 539
540 540 def pull(self, url, commit_ids=None):
541 541 """
542 542 Pull changes from external location.
543 543
544 544 :param commit_ids: Optional. Can be set to a list of commit ids
545 545 which shall be pulled from the other repository.
546 546 """
547 547 url = self._get_url(url)
548 548 self._remote.pull(url, commit_ids=commit_ids)
549 549 self._remote.invalidate_vcs_cache()
550 550
551 551 def fetch(self, url, commit_ids=None):
552 552 """
553 553 Backward compatibility with GIT fetch==pull
554 554 """
555 555 return self.pull(url, commit_ids=commit_ids)
556 556
557 557 def push(self, url):
558 558 url = self._get_url(url)
559 559 self._remote.sync_push(url)
560 560
561 561 def _local_clone(self, clone_path):
562 562 """
563 563 Create a local clone of the current repo.
564 564 """
565 565 self._remote.clone(self.path, clone_path, update_after_clone=True,
566 566 hooks=False)
567 567
568 568 def _update(self, revision, clean=False):
569 569 """
570 570 Update the working copy to the specified revision.
571 571 """
572 572 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
573 573 self._remote.update(revision, clean=clean)
574 574
575 575 def _identify(self):
576 576 """
577 577 Return the current state of the working directory.
578 578 """
579 579 return self._remote.identify().strip().rstrip('+')
580 580
581 581 def _heads(self, branch=None):
582 582 """
583 583 Return the commit ids of the repository heads.
584 584 """
585 585 return self._remote.heads(branch=branch).strip().split(' ')
586 586
587 587 def _ancestor(self, revision1, revision2):
588 588 """
589 589 Return the common ancestor of the two revisions.
590 590 """
591 591 return self._remote.ancestor(revision1, revision2)
592 592
593 593 def _local_push(
594 594 self, revision, repository_path, push_branches=False,
595 595 enable_hooks=False):
596 596 """
597 597 Push the given revision to the specified repository.
598 598
599 599 :param push_branches: allow to create branches in the target repo.
600 600 """
601 601 self._remote.push(
602 602 [revision], repository_path, hooks=enable_hooks,
603 603 push_branches=push_branches)
604 604
605 605 def _local_merge(self, target_ref, merge_message, user_name, user_email,
606 606 source_ref, use_rebase=False, dry_run=False):
607 607 """
608 608 Merge the given source_revision into the checked out revision.
609 609
610 610 Returns the commit id of the merge and a boolean indicating if the
611 611 commit needs to be pushed.
612 612 """
613 613 self._update(target_ref.commit_id, clean=True)
614 614
615 615 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
616 616 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
617 617
618 618 if ancestor == source_ref.commit_id:
619 619 # Nothing to do, the changes were already integrated
620 620 return target_ref.commit_id, False
621 621
622 622 elif ancestor == target_ref.commit_id and is_the_same_branch:
623 623 # In this case we should force a commit message
624 624 return source_ref.commit_id, True
625 625
626 626 if use_rebase:
627 627 try:
628 628 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
629 629 target_ref.commit_id)
630 630 self.bookmark(bookmark_name, revision=source_ref.commit_id)
631 631 self._remote.rebase(
632 632 source=source_ref.commit_id, dest=target_ref.commit_id)
633 633 self._remote.invalidate_vcs_cache()
634 634 self._update(bookmark_name, clean=True)
635 635 return self._identify(), True
636 636 except RepositoryError:
637 637 # The rebase-abort may raise another exception which 'hides'
638 638 # the original one, therefore we log it here.
639 639 log.exception('Error while rebasing shadow repo during merge.')
640 640
641 641 # Cleanup any rebase leftovers
642 642 self._remote.invalidate_vcs_cache()
643 643 self._remote.rebase(abort=True)
644 644 self._remote.invalidate_vcs_cache()
645 645 self._remote.update(clean=True)
646 646 raise
647 647 else:
648 648 try:
649 649 self._remote.merge(source_ref.commit_id)
650 650 self._remote.invalidate_vcs_cache()
651 651 self._remote.commit(
652 652 message=safe_str(merge_message),
653 653 username=safe_str('%s <%s>' % (user_name, user_email)))
654 654 self._remote.invalidate_vcs_cache()
655 655 return self._identify(), True
656 656 except RepositoryError:
657 657 # Cleanup any merge leftovers
658 658 self._remote.update(clean=True)
659 659 raise
660 660
661 661 def _local_close(self, target_ref, user_name, user_email,
662 662 source_ref, close_message=''):
663 663 """
664 664 Close the branch of the given source_revision
665 665
666 666 Returns the commit id of the close and a boolean indicating if the
667 667 commit needs to be pushed.
668 668 """
669 669 self._update(source_ref.commit_id)
670 670 message = close_message or "Closing branch: `{}`".format(source_ref.name)
671 671 try:
672 672 self._remote.commit(
673 673 message=safe_str(message),
674 674 username=safe_str('%s <%s>' % (user_name, user_email)),
675 675 close_branch=True)
676 676 self._remote.invalidate_vcs_cache()
677 677 return self._identify(), True
678 678 except RepositoryError:
679 679 # Cleanup any commit leftovers
680 680 self._remote.update(clean=True)
681 681 raise
682 682
683 683 def _is_the_same_branch(self, target_ref, source_ref):
684 684 return (
685 685 self._get_branch_name(target_ref) ==
686 686 self._get_branch_name(source_ref))
687 687
688 688 def _get_branch_name(self, ref):
689 689 if ref.type == 'branch':
690 690 return ref.name
691 691 return self._remote.ctx_branch(ref.commit_id)
692 692
693 693 def _maybe_prepare_merge_workspace(
694 694 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
695 695 shadow_repository_path = self._get_shadow_repository_path(
696 696 repo_id, workspace_id)
697 697 if not os.path.exists(shadow_repository_path):
698 698 self._local_clone(shadow_repository_path)
699 699 log.debug(
700 700 'Prepared shadow repository in %s', shadow_repository_path)
701 701
702 702 return shadow_repository_path
703 703
704 704 def _merge_repo(self, repo_id, workspace_id, target_ref,
705 705 source_repo, source_ref, merge_message,
706 706 merger_name, merger_email, dry_run=False,
707 707 use_rebase=False, close_branch=False):
708 708
709 709 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
710 710 'rebase' if use_rebase else 'merge', dry_run)
711 711 if target_ref.commit_id not in self._heads():
712 712 return MergeResponse(
713 713 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
714 714 metadata={'target_ref': target_ref})
715 715
716 716 try:
717 717 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
718 heads = ','.join(self._heads(target_ref.name))
718 heads = '\n,'.join(self._heads(target_ref.name))
719 metadata = {
720 'target_ref': target_ref,
721 'source_ref': source_ref,
722 'heads': heads
723 }
719 724 return MergeResponse(
720 725 False, False, None,
721 726 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
722 metadata={'heads': heads})
727 metadata=metadata)
723 728 except CommitDoesNotExistError:
724 729 log.exception('Failure when looking up branch heads on hg target')
725 730 return MergeResponse(
726 731 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
727 732 metadata={'target_ref': target_ref})
728 733
729 734 shadow_repository_path = self._maybe_prepare_merge_workspace(
730 735 repo_id, workspace_id, target_ref, source_ref)
731 736 shadow_repo = self._get_shadow_instance(shadow_repository_path)
732 737
733 738 log.debug('Pulling in target reference %s', target_ref)
734 739 self._validate_pull_reference(target_ref)
735 740 shadow_repo._local_pull(self.path, target_ref)
736 741
737 742 try:
738 743 log.debug('Pulling in source reference %s', source_ref)
739 744 source_repo._validate_pull_reference(source_ref)
740 745 shadow_repo._local_pull(source_repo.path, source_ref)
741 746 except CommitDoesNotExistError:
742 747 log.exception('Failure when doing local pull on hg shadow repo')
743 748 return MergeResponse(
744 749 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
745 750 metadata={'source_ref': source_ref})
746 751
747 752 merge_ref = None
748 753 merge_commit_id = None
749 754 close_commit_id = None
750 755 merge_failure_reason = MergeFailureReason.NONE
751 756 metadata = {}
752 757
753 758 # enforce that close branch should be used only in case we source from
754 759 # an actual Branch
755 760 close_branch = close_branch and source_ref.type == 'branch'
756 761
757 762 # don't allow to close branch if source and target are the same
758 763 close_branch = close_branch and source_ref.name != target_ref.name
759 764
760 765 needs_push_on_close = False
761 766 if close_branch and not use_rebase and not dry_run:
762 767 try:
763 768 close_commit_id, needs_push_on_close = shadow_repo._local_close(
764 769 target_ref, merger_name, merger_email, source_ref)
765 770 merge_possible = True
766 771 except RepositoryError:
767 772 log.exception('Failure when doing close branch on '
768 773 'shadow repo: %s', shadow_repo)
769 774 merge_possible = False
770 775 merge_failure_reason = MergeFailureReason.MERGE_FAILED
771 776 else:
772 777 merge_possible = True
773 778
774 779 needs_push = False
775 780 if merge_possible:
776 781 try:
777 782 merge_commit_id, needs_push = shadow_repo._local_merge(
778 783 target_ref, merge_message, merger_name, merger_email,
779 784 source_ref, use_rebase=use_rebase, dry_run=dry_run)
780 785 merge_possible = True
781 786
782 787 # read the state of the close action, if it
783 788 # maybe required a push
784 789 needs_push = needs_push or needs_push_on_close
785 790
786 791 # Set a bookmark pointing to the merge commit. This bookmark
787 792 # may be used to easily identify the last successful merge
788 793 # commit in the shadow repository.
789 794 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
790 795 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
791 796 except SubrepoMergeError:
792 797 log.exception(
793 798 'Subrepo merge error during local merge on hg shadow repo.')
794 799 merge_possible = False
795 800 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
796 801 needs_push = False
797 802 except RepositoryError:
798 803 log.exception('Failure when doing local merge on hg shadow repo')
799 804 merge_possible = False
800 805 merge_failure_reason = MergeFailureReason.MERGE_FAILED
801 806 needs_push = False
802 807
803 808 if merge_possible and not dry_run:
804 809 if needs_push:
805 810 # In case the target is a bookmark, update it, so after pushing
806 811 # the bookmarks is also updated in the target.
807 812 if target_ref.type == 'book':
808 813 shadow_repo.bookmark(
809 814 target_ref.name, revision=merge_commit_id)
810 815 try:
811 816 shadow_repo_with_hooks = self._get_shadow_instance(
812 817 shadow_repository_path,
813 818 enable_hooks=True)
814 819 # This is the actual merge action, we push from shadow
815 820 # into origin.
816 821 # Note: the push_branches option will push any new branch
817 822 # defined in the source repository to the target. This may
818 823 # be dangerous as branches are permanent in Mercurial.
819 824 # This feature was requested in issue #441.
820 825 shadow_repo_with_hooks._local_push(
821 826 merge_commit_id, self.path, push_branches=True,
822 827 enable_hooks=True)
823 828
824 829 # maybe we also need to push the close_commit_id
825 830 if close_commit_id:
826 831 shadow_repo_with_hooks._local_push(
827 832 close_commit_id, self.path, push_branches=True,
828 833 enable_hooks=True)
829 834 merge_succeeded = True
830 835 except RepositoryError:
831 836 log.exception(
832 837 'Failure when doing local push from the shadow '
833 838 'repository to the target repository at %s.', self.path)
834 839 merge_succeeded = False
835 840 merge_failure_reason = MergeFailureReason.PUSH_FAILED
836 841 metadata['target'] = 'hg shadow repo'
837 842 metadata['merge_commit'] = merge_commit_id
838 843 else:
839 844 merge_succeeded = True
840 845 else:
841 846 merge_succeeded = False
842 847
843 848 return MergeResponse(
844 849 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
845 850 metadata=metadata)
846 851
847 852 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
848 853 config = self.config.copy()
849 854 if not enable_hooks:
850 855 config.clear_section('hooks')
851 856 return MercurialRepository(shadow_repository_path, config)
852 857
853 858 def _validate_pull_reference(self, reference):
854 859 if not (reference.name in self.bookmarks or
855 860 reference.name in self.branches or
856 861 self.get_commit(reference.commit_id)):
857 862 raise CommitDoesNotExistError(
858 863 'Unknown branch, bookmark or commit id')
859 864
860 865 def _local_pull(self, repository_path, reference):
861 866 """
862 867 Fetch a branch, bookmark or commit from a local repository.
863 868 """
864 869 repository_path = os.path.abspath(repository_path)
865 870 if repository_path == self.path:
866 871 raise ValueError('Cannot pull from the same repository')
867 872
868 873 reference_type_to_option_name = {
869 874 'book': 'bookmark',
870 875 'branch': 'branch',
871 876 }
872 877 option_name = reference_type_to_option_name.get(
873 878 reference.type, 'revision')
874 879
875 880 if option_name == 'revision':
876 881 ref = reference.commit_id
877 882 else:
878 883 ref = reference.name
879 884
880 885 options = {option_name: [ref]}
881 886 self._remote.pull_cmd(repository_path, hooks=False, **options)
882 887 self._remote.invalidate_vcs_cache()
883 888
884 889 def bookmark(self, bookmark, revision=None):
885 890 if isinstance(bookmark, unicode):
886 891 bookmark = safe_str(bookmark)
887 892 self._remote.bookmark(bookmark, revision=revision)
888 893 self._remote.invalidate_vcs_cache()
889 894
    def get_path_permissions(self, username):
        # Build a path-level ACL checker for `username` from the optional
        # .hg/hgacl file inside the repository; returns None when no ACL
        # file exists.
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # Look up "<username><suffix>" in the [narrowhgacl] section,
            # falling back to "default<suffix>" when the user has no entry.
            svalue = None
            try:
                svalue = hgacl.get('narrowhgacl', username + suffix)
            except configparser.NoOptionError:
                try:
                    svalue = hgacl.get('narrowhgacl', 'default' + suffix)
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            # The repository root is always part of the pattern list.
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                # a literal (glob-free) pattern implicitly covers its subtree
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                # surface any parse/read problem as a repository
                # requirement error so callers can report it
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
926 931
927 932
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    # Used when commits were selected via a revset filter: the remote
    # returns numeric revision indexes rather than commit hashes, so the
    # factory resolves commits by index instead of by id.

    def _commit_factory(self, commit_id):
        # `commit_id` is actually a numeric revision index here.
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,4981 +1,5021 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 import string
28 29 import hashlib
29 30 import logging
30 31 import datetime
31 32 import warnings
32 33 import ipaddress
33 34 import functools
34 35 import traceback
35 36 import collections
36 37
37 38 from sqlalchemy import (
38 39 or_, and_, not_, func, TypeDecorator, event,
39 40 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 41 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 42 Text, Float, PickleType)
42 43 from sqlalchemy.sql.expression import true, false, case
43 44 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
44 45 from sqlalchemy.orm import (
45 46 relationship, joinedload, class_mapper, validates, aliased)
46 47 from sqlalchemy.ext.declarative import declared_attr
47 48 from sqlalchemy.ext.hybrid import hybrid_property
48 49 from sqlalchemy.exc import IntegrityError # pragma: no cover
49 50 from sqlalchemy.dialects.mysql import LONGTEXT
50 51 from zope.cachedescriptors.property import Lazy as LazyProperty
51 52 from pyramid import compat
52 53 from pyramid.threadlocal import get_current_request
54 from webhelpers.text import collapse, remove_formatting
53 55
54 56 from rhodecode.translation import _
55 57 from rhodecode.lib.vcs import get_vcs_instance
56 58 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
57 59 from rhodecode.lib.utils2 import (
58 60 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
59 61 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
60 62 glob2re, StrictAttributeDict, cleaned_uri)
61 63 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
62 64 JsonRaw
63 65 from rhodecode.lib.ext_json import json
64 66 from rhodecode.lib.caching_query import FromCache
65 67 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
66 68 from rhodecode.lib.encrypt2 import Encryptor
67 69 from rhodecode.model.meta import Base, Session
68 70
69 71 URL_SEP = '/'
70 72 log = logging.getLogger(__name__)
71 73
72 74 # =============================================================================
73 75 # BASE CLASSES
74 76 # =============================================================================
75 77
76 78 # this is propagated from .ini file rhodecode.encrypted_values.secret or
77 79 # beaker.session.secret if first is not set.
78 80 # and initialized at environment.py
79 81 ENCRYPTION_KEY = None
80 82
81 83 # used to sort permissions by types, '#' used here is not allowed to be in
82 84 # usernames, and it's very early in sorted string.printable table.
83 85 PERMISSION_TYPE_SORT = {
84 86 'admin': '####',
85 87 'write': '###',
86 88 'read': '##',
87 89 'none': '#',
88 90 }
89 91
90 92
def display_user_sort(obj):
    """
    Sort key for permission entries of Repository/RepoGroup/UserGroup
    ``.permissions()`` results; the default user always sorts first,
    then entries order by permission strength, then username.
    """
    # default (anonymous) user wins over everything - '#####' sorts before
    # any PERMISSION_TYPE_SORT prefix
    if obj.username == User.DEFAULT_USER:
        return '#####'

    perm_suffix = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(perm_suffix, '') + obj.username
102 104
103 105
def display_user_group_sort(obj):
    """
    Sort key for user-group permission entries of
    Repository/RepoGroup/UserGroup ``.permissions()`` results: orders by
    permission strength, then group name.
    """
    perm_suffix = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(perm_suffix, '') + obj.users_group_name
113 115
114 116
def _hash_key(k):
    # Normalize an arbitrary cache-key value into a stable sha1 hex digest
    # (keeps cache keys short and safe for any input characters).
    return sha1_safe(k)
117 119
118 120
def in_filter_generator(qry, items, limit=500):
    """
    Split one large ``IN (...)`` clause into a list of smaller ``in_``
    criteria (at most ``limit`` items each), meant to be combined with
    ``or_``. Works around database limits on IN-list size, e.g.::

        cnt = Repository.query().filter(
            or_(
                *in_filter_generator(Repository.repo_id, range(100000))
            )).count()
    """
    if not items:
        # empty list will cause empty query which might cause security issues
        # this can lead to hidden unpleasant results
        items = [-1]

    return [
        qry.in_(items[offset:offset + limit])
        for offset in xrange(0, len(items), limit)
    ]
140 142
141 143
# default __table_args__ shared by every model table; ``extend_existing``
# presumably allows the same tables to be re-declared by the migration
# schema modules - TODO confirm against rhodecode.lib.dbmigrate usage
base_table_args = {
    'extend_existing': True,
    'mysql_engine': 'InnoDB',
    'mysql_charset': 'utf8',
    'sqlite_autoincrement': True
}
148 150
149 151
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts.

    The algorithm is selected by the ``rhodecode.encrypted_values.algorithm``
    config key ('aes' by default, or 'fernet').
    """
    impl = Text

    def process_bind_param(self, value, dialect):
        """
        Setter for storing value: encrypts plaintext before it hits the DB.

        :raises ValueError: when the value is already encrypted, or when the
            configured algorithm is unknown
        """
        import rhodecode
        if not value:
            return value

        # protect against double encrypting if values is already encrypted
        if value.startswith('enc$aes$') \
                or value.startswith('enc$aes_hmac$') \
                or value.startswith('enc2$'):
            raise ValueError('value needs to be in unencrypted format, '
                             'ie. not starting with enc$ or enc2$')

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        if algo == 'aes':
            return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).encrypt(value)
        else:
            # BUG FIX: the ValueError was previously constructed but never
            # raised, so an unknown algorithm silently stored None
            raise ValueError(
                'Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))

    def process_result_value(self, value, dialect):
        """
        Getter for retrieving value: decrypts DB content back to plaintext.

        :raises ValueError: when the configured algorithm is unknown
        """
        import rhodecode
        if not value:
            return value

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
        if algo == 'aes':
            decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).decrypt(value)
        else:
            # BUG FIX: the ValueError was previously constructed but never
            # raised, which let execution fall through to the return below
            # and fail with NameError on ``decrypted_data``
            raise ValueError(
                'Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
        return decrypted_data
202 204
203 205
class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        # shorthand for a session-bound query over this model
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        # returns None for falsy ids (None, 0) without touching the database
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch by primary key or raise pyramid ``HTTPNotFound``."""
        from pyramid.httpexceptions import HTTPNotFound

        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        # NOTE(review): no existence check - an unknown id_ passes None to
        # Session().delete(), which raises; confirm callers guarantee the row
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        """
        Look up an already-loaded instance in the session identity map by
        attribute value, avoiding a database round-trip. Returns None when
        nothing (or more than one object) matches.
        """
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            # multiple matches indicate a data problem; log and fall through
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
308 310
309 311
class RhodeCodeSetting(Base, BaseModel):
    """Global key/value application settings with typed (de)serialization."""
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    # converters applied when reading a value back, keyed by the base part of
    # app_settings_type (the part before an optional '.encrypted' suffix)
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # NOTE(review): assert is stripped under ``python -O``; an explicit
        # raise would validate unconditionally - confirm intent
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        """Return the stored value converted (and decrypted) per its type."""
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_unicode(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_unicode(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # only the base type (before any '.encrypted' suffix) is validated
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        """Return all settings whose name starts with ``prefix``."""
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
394 396
395 397
class RhodeCodeUi(Base, BaseModel):
    """Global VCS ui/hook configuration entries (section/key/value rows)."""
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    # hooks shipped with RhodeCode itself, as opposed to user-defined ones
    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)
434 445
435 446
class RepoRhodeCodeSetting(Base, BaseModel):
    """Per-repository settings entry; the repo-scoped counterpart of
    RhodeCodeSetting."""
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # NOTE(review): assert is stripped under ``python -O``; an explicit
        # raise would validate unconditionally - confirm intent
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        # unlike the global RhodeCodeSetting, there is no '.encrypted' suffix
        # or decryption handling here - the full type string is the lookup key
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # exact-match validation (no '.encrypted' suffix supported here)
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
509 520
510 521
class RepoRhodeCodeUi(Base, BaseModel):
    """Per-repository override of a RhodeCodeUi entry."""
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
541 552
542 553
543 554 class User(Base, BaseModel):
544 555 __tablename__ = 'users'
545 556 __table_args__ = (
546 557 UniqueConstraint('username'), UniqueConstraint('email'),
547 558 Index('u_username_idx', 'username'),
548 559 Index('u_email_idx', 'email'),
549 560 base_table_args
550 561 )
551 562
552 563 DEFAULT_USER = 'default'
553 564 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
554 565 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
555 566
556 567 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
557 568 username = Column("username", String(255), nullable=True, unique=None, default=None)
558 569 password = Column("password", String(255), nullable=True, unique=None, default=None)
559 570 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
560 571 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
561 572 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
562 573 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
563 574 _email = Column("email", String(255), nullable=True, unique=None, default=None)
564 575 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
565 576 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
566 577
567 578 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
568 579 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
569 580 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
570 581 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
571 582 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
572 583 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
573 584
574 585 user_log = relationship('UserLog')
575 586 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
576 587
577 588 repositories = relationship('Repository')
578 589 repository_groups = relationship('RepoGroup')
579 590 user_groups = relationship('UserGroup')
580 591
581 592 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
582 593 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
583 594
584 595 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
585 596 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
586 597 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
587 598
588 599 group_member = relationship('UserGroupMember', cascade='all')
589 600
590 601 notifications = relationship('UserNotification', cascade='all')
591 602 # notifications assigned to this user
592 603 user_created_notifications = relationship('Notification', cascade='all')
593 604 # comments created by this user
594 605 user_comments = relationship('ChangesetComment', cascade='all')
595 606 # user profile extra info
596 607 user_emails = relationship('UserEmailMap', cascade='all')
597 608 user_ip_map = relationship('UserIpMap', cascade='all')
598 609 user_auth_tokens = relationship('UserApiKeys', cascade='all')
599 610 user_ssh_keys = relationship('UserSshKeys', cascade='all')
600 611
601 612 # gists
602 613 user_gists = relationship('Gist', cascade='all')
603 614 # user pull requests
604 615 user_pull_requests = relationship('PullRequest', cascade='all')
605 616 # external identities
606 617 extenal_identities = relationship(
607 618 'ExternalIdentity',
608 619 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
609 620 cascade='all')
610 621 # review rules
611 622 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
612 623
613 624 def __unicode__(self):
614 625 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
615 626 self.user_id, self.username)
616 627
617 628 @hybrid_property
618 629 def email(self):
619 630 return self._email
620 631
621 632 @email.setter
622 633 def email(self, val):
623 634 self._email = val.lower() if val else None
624 635
625 636 @hybrid_property
626 637 def first_name(self):
627 638 from rhodecode.lib import helpers as h
628 639 if self.name:
629 640 return h.escape(self.name)
630 641 return self.name
631 642
632 643 @hybrid_property
633 644 def last_name(self):
634 645 from rhodecode.lib import helpers as h
635 646 if self.lastname:
636 647 return h.escape(self.lastname)
637 648 return self.lastname
638 649
639 650 @hybrid_property
640 651 def api_key(self):
641 652 """
642 653 Fetch if exist an auth-token with role ALL connected to this user
643 654 """
644 655 user_auth_token = UserApiKeys.query()\
645 656 .filter(UserApiKeys.user_id == self.user_id)\
646 657 .filter(or_(UserApiKeys.expires == -1,
647 658 UserApiKeys.expires >= time.time()))\
648 659 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
649 660 if user_auth_token:
650 661 user_auth_token = user_auth_token.api_key
651 662
652 663 return user_auth_token
653 664
654 665 @api_key.setter
655 666 def api_key(self, val):
656 667 # don't allow to set API key this is deprecated for now
657 668 self._api_key = None
658 669
659 670 @property
660 671 def reviewer_pull_requests(self):
661 672 return PullRequestReviewers.query() \
662 673 .options(joinedload(PullRequestReviewers.pull_request)) \
663 674 .filter(PullRequestReviewers.user_id == self.user_id) \
664 675 .all()
665 676
666 677 @property
667 678 def firstname(self):
668 679 # alias for future
669 680 return self.name
670 681
671 682 @property
672 683 def emails(self):
673 684 other = UserEmailMap.query()\
674 685 .filter(UserEmailMap.user == self) \
675 686 .order_by(UserEmailMap.email_id.asc()) \
676 687 .all()
677 688 return [self.email] + [x.email for x in other]
678 689
679 690 @property
680 691 def auth_tokens(self):
681 692 auth_tokens = self.get_auth_tokens()
682 693 return [x.api_key for x in auth_tokens]
683 694
684 695 def get_auth_tokens(self):
685 696 return UserApiKeys.query()\
686 697 .filter(UserApiKeys.user == self)\
687 698 .order_by(UserApiKeys.user_api_key_id.asc())\
688 699 .all()
689 700
690 701 @LazyProperty
691 702 def feed_token(self):
692 703 return self.get_feed_token()
693 704
694 705 def get_feed_token(self, cache=True):
695 706 feed_tokens = UserApiKeys.query()\
696 707 .filter(UserApiKeys.user == self)\
697 708 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
698 709 if cache:
699 710 feed_tokens = feed_tokens.options(
700 711 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
701 712
702 713 feed_tokens = feed_tokens.all()
703 714 if feed_tokens:
704 715 return feed_tokens[0].api_key
705 716 return 'NO_FEED_TOKEN_AVAILABLE'
706 717
707 718 @classmethod
708 719 def get(cls, user_id, cache=False):
709 720 if not user_id:
710 721 return
711 722
712 723 user = cls.query()
713 724 if cache:
714 725 user = user.options(
715 726 FromCache("sql_cache_short", "get_users_%s" % user_id))
716 727 return user.get(user_id)
717 728
718 729 @classmethod
719 730 def extra_valid_auth_tokens(cls, user, role=None):
720 731 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
721 732 .filter(or_(UserApiKeys.expires == -1,
722 733 UserApiKeys.expires >= time.time()))
723 734 if role:
724 735 tokens = tokens.filter(or_(UserApiKeys.role == role,
725 736 UserApiKeys.role == UserApiKeys.ROLE_ALL))
726 737 return tokens.all()
727 738
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check ``auth_token`` against this user's non-expired auth tokens.

        Plain-stored tokens are matched by dict lookup first; hash-stored
        tokens are checked afterwards with the (expensive) crypto backend.

        :param auth_token: plaintext token supplied by the caller
        :param roles: optional list of acceptable token roles; ROLE_ALL is
            always accepted in addition
        :param scope_repo_id: repo id of the calling context; tokens carrying
            a repo scope must match it exactly
        :return: True on a (correctly scoped) match, False otherwise
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        # only tokens that never expire (-1) or have not yet expired
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted user tokens to check for authentication',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                return True

        return False
784 795
785 796 @property
786 797 def ip_addresses(self):
787 798 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
788 799 return [x.ip_addr for x in ret]
789 800
790 801 @property
791 802 def username_and_name(self):
792 803 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
793 804
794 805 @property
795 806 def username_or_name_or_email(self):
796 807 full_name = self.full_name if self.full_name is not ' ' else None
797 808 return self.username or full_name or self.email
798 809
799 810 @property
800 811 def full_name(self):
801 812 return '%s %s' % (self.first_name, self.last_name)
802 813
803 814 @property
804 815 def full_name_or_username(self):
805 816 return ('%s %s' % (self.first_name, self.last_name)
806 817 if (self.first_name and self.last_name) else self.username)
807 818
808 819 @property
809 820 def full_contact(self):
810 821 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
811 822
812 823 @property
813 824 def short_contact(self):
814 825 return '%s %s' % (self.first_name, self.last_name)
815 826
816 827 @property
817 828 def is_admin(self):
818 829 return self.admin
819 830
820 831 def AuthUser(self, **kwargs):
821 832 """
822 833 Returns instance of AuthUser for this user
823 834 """
824 835 from rhodecode.lib.auth import AuthUser
825 836 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
826 837
827 838 @hybrid_property
828 839 def user_data(self):
829 840 if not self._user_data:
830 841 return {}
831 842
832 843 try:
833 844 return json.loads(self._user_data)
834 845 except TypeError:
835 846 return {}
836 847
837 848 @user_data.setter
838 849 def user_data(self, val):
839 850 if not isinstance(val, dict):
840 851 raise Exception('user_data must be dict, got %s' % type(val))
841 852 try:
842 853 self._user_data = json.dumps(val)
843 854 except Exception:
844 855 log.error(traceback.format_exc())
845 856
    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        """
        Fetch a user by username.

        :param case_insensitive: compare usernames lower-cased on both sides
        :param cache: use the short sql cache region for the query
        :param identity_cache: try the session identity map first; on a hit
            the sql cache path is skipped entirely
        """
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                cache_key = "get_user_by_name_%s" % _hash_key(username)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
868 879
869 880 @classmethod
870 881 def get_by_auth_token(cls, auth_token, cache=False):
871 882 q = UserApiKeys.query()\
872 883 .filter(UserApiKeys.api_key == auth_token)\
873 884 .filter(or_(UserApiKeys.expires == -1,
874 885 UserApiKeys.expires >= time.time()))
875 886 if cache:
876 887 q = q.options(
877 888 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
878 889
879 890 match = q.first()
880 891 if match:
881 892 return match.user
882 893
883 894 @classmethod
884 895 def get_by_email(cls, email, case_insensitive=False, cache=False):
885 896
886 897 if case_insensitive:
887 898 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
888 899
889 900 else:
890 901 q = cls.query().filter(cls.email == email)
891 902
892 903 email_key = _hash_key(email)
893 904 if cache:
894 905 q = q.options(
895 906 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
896 907
897 908 ret = q.scalar()
898 909 if ret is None:
899 910 q = UserEmailMap.query()
900 911 # try fetching in alternate email map
901 912 if case_insensitive:
902 913 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
903 914 else:
904 915 q = q.filter(UserEmailMap.email == email)
905 916 q = q.options(joinedload(UserEmailMap.user))
906 917 if cache:
907 918 q = q.options(
908 919 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
909 920 ret = getattr(q.scalar(), 'user', None)
910 921
911 922 return ret
912 923
913 924 @classmethod
914 925 def get_from_cs_author(cls, author):
915 926 """
916 927 Tries to get User objects out of commit author string
917 928
918 929 :param author:
919 930 """
920 931 from rhodecode.lib.helpers import email, author_name
921 932 # Valid email in the attribute passed, see if they're in the system
922 933 _email = email(author)
923 934 if _email:
924 935 user = cls.get_by_email(_email, case_insensitive=True)
925 936 if user:
926 937 return user
927 938 # Maybe we can match by username?
928 939 _author = author_name(author)
929 940 user = cls.get_by_username(_author, case_insensitive=True)
930 941 if user:
931 942 return user
932 943
933 944 def update_userdata(self, **kwargs):
934 945 usr = self
935 946 old = usr.user_data
936 947 old.update(**kwargs)
937 948 usr.user_data = old
938 949 Session().add(usr)
939 950 log.debug('updated userdata with ', kwargs)
940 951
941 952 def update_lastlogin(self):
942 953 """Update user lastlogin"""
943 954 self.last_login = datetime.datetime.now()
944 955 Session().add(self)
945 956 log.debug('updated user %s lastlogin', self.username)
946 957
947 958 def update_password(self, new_password):
948 959 from rhodecode.lib.auth import get_crypt_password
949 960
950 961 self.password = get_crypt_password(new_password)
951 962 Session().add(self)
952 963
953 964 @classmethod
954 965 def get_first_super_admin(cls):
955 966 user = User.query()\
956 967 .filter(User.admin == true()) \
957 968 .order_by(User.user_id.asc()) \
958 969 .first()
959 970
960 971 if user is None:
961 972 raise Exception('FATAL: Missing administrative account!')
962 973 return user
963 974
964 975 @classmethod
965 976 def get_all_super_admins(cls, only_active=False):
966 977 """
967 978 Returns all admin accounts sorted by username
968 979 """
969 980 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
970 981 if only_active:
971 982 qry = qry.filter(User.active == true())
972 983 return qry.all()
973 984
974 985 @classmethod
975 986 def get_default_user(cls, cache=False, refresh=False):
976 987 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
977 988 if user is None:
978 989 raise Exception('FATAL: Missing default account!')
979 990 if refresh:
980 991 # The default user might be based on outdated state which
981 992 # has been loaded from the cache.
982 993 # A call to refresh() ensures that the
983 994 # latest state from the database is used.
984 995 Session().refresh(user)
985 996 return user
986 997
987 998 def _get_default_perms(self, user, suffix=''):
988 999 from rhodecode.model.permission import PermissionModel
989 1000 return PermissionModel().get_default_perms(user.user_perms, suffix)
990 1001
991 1002 def get_default_perms(self, suffix=''):
992 1003 return self._get_default_perms(self, suffix)
993 1004
994 1005 def get_api_data(self, include_secrets=False, details='full'):
995 1006 """
996 1007 Common function for generating user related data for API
997 1008
998 1009 :param include_secrets: By default secrets in the API data will be replaced
999 1010 by a placeholder value to prevent exposing this data by accident. In case
1000 1011 this data shall be exposed, set this flag to ``True``.
1001 1012
1002 1013 :param details: details can be 'basic|full' basic gives only a subset of
1003 1014 the available user information that includes user_id, name and emails.
1004 1015 """
1005 1016 user = self
1006 1017 user_data = self.user_data
1007 1018 data = {
1008 1019 'user_id': user.user_id,
1009 1020 'username': user.username,
1010 1021 'firstname': user.name,
1011 1022 'lastname': user.lastname,
1012 1023 'email': user.email,
1013 1024 'emails': user.emails,
1014 1025 }
1015 1026 if details == 'basic':
1016 1027 return data
1017 1028
1018 1029 auth_token_length = 40
1019 1030 auth_token_replacement = '*' * auth_token_length
1020 1031
1021 1032 extras = {
1022 1033 'auth_tokens': [auth_token_replacement],
1023 1034 'active': user.active,
1024 1035 'admin': user.admin,
1025 1036 'extern_type': user.extern_type,
1026 1037 'extern_name': user.extern_name,
1027 1038 'last_login': user.last_login,
1028 1039 'last_activity': user.last_activity,
1029 1040 'ip_addresses': user.ip_addresses,
1030 1041 'language': user_data.get('language')
1031 1042 }
1032 1043 data.update(extras)
1033 1044
1034 1045 if include_secrets:
1035 1046 data['auth_tokens'] = user.auth_tokens
1036 1047 return data
1037 1048
1038 1049 def __json__(self):
1039 1050 data = {
1040 1051 'full_name': self.full_name,
1041 1052 'full_name_or_username': self.full_name_or_username,
1042 1053 'short_contact': self.short_contact,
1043 1054 'full_contact': self.full_contact,
1044 1055 }
1045 1056 data.update(self.get_api_data())
1046 1057 return data
1047 1058
1048 1059
class UserApiKeys(Base, BaseModel):
    """
    Authentication tokens (API keys) issued to users.

    A token carries a role (what it may be used for), an expiry timestamp
    and an optional scope limiting it to one repository or repository group.
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key', unique=True),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_HTTP = 'token_role_http'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_FEED = 'token_role_feed'
    ROLE_PASSWORD_RESET = 'token_password_reset'

    # NOTE(review): ROLE_PASSWORD_RESET is deliberately absent here --
    # presumably it must not appear among user-selectable roles; confirm.
    ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # expires is a unix timestamp; -1 marks a token that never expires
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        # debug representation, e.g. <UserApiKeys('token_role_api')>
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        """Serializable representation; note this exposes the raw token."""
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        """API payload; the token is obfuscated unless ``include_secrets``."""
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe to render in templates."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        """``True`` once the expiry timestamp has passed; -1 never expires."""
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        """Human readable (translated) role name; unknown roles pass through."""
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
        }.get(role, role)

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)

    def _get_scope(self):
        """Describe the token scope: a repo, a repo group, or 'Global'."""
        if self.repo:
            return 'Repository: {}'.format(self.repo.repo_name)
        if self.repo_group:
            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
        return 'Global'

    @property
    def scope_humanized(self):
        return self._get_scope()

    @property
    def token_obfuscated(self):
        # first 4 characters plus a mask; implicitly None when no token is set
        if self.api_key:
            return self.api_key[:4] + "****"
1149 1160
1150 1161
class UserEmailMap(Base, BaseModel):
    """Additional (alternate) email addresses mapped to a user account."""
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        base_table_args
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        """Reject emails already used as somebody's primary address.

        :raises AttributeError: when ``email`` exists in the users table
        """
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # bug fix: message previously read "is present is user table"
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # normalize to lowercase; empty values are stored as None
        self._email = val.lower() if val else None
1180 1191
1181 1192
class UserIpMap(Base, BaseModel):
    """Per-user IP address/network whitelist entries."""
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    # ip_addr may be a single address or a network in CIDR notation
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe to render in templates."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        """Return ``[network_address, broadcast_address]`` for ``ip_addr``."""
        # strict=False accepts addresses with host bits set
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
1216 1227
1217 1228
class UserSshKeys(Base, BaseModel):
    """SSH public keys registered for a user, identified by fingerprint."""
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # full public key material
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # last time the key was used for authentication; None when never used
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        # key data itself is intentionally not serialized
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        """API payload for this key; same shape as :meth:`__json__`."""
        data = self.__json__()
        return data
1252 1263
1253 1264
class UserLog(Base, BaseModel):
    """Audit-log entry describing a user action on a repository."""
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    # schema versions of the serialized action/user data payloads
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # user/repo FKs are nullable and SET NULL on delete so log history
    # survives removal of the referenced user or repository
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    # denormalized copies kept so the entry stays readable after FK is nulled
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        """Alias for the primary key, used by audit-log consumers."""
        return self.user_log_id

    @property
    def action_as_day(self):
        """The calendar day (``datetime.date``) the action happened on."""
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1302 1313
1303 1314
class UserGroup(Base, BaseModel):
    """A named group of users; target of group-level permission grants."""
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    # fix: removed stray trailing space from the relationship target name
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")

    @classmethod
    def _load_group_data(cls, column):
        """Deserialize the JSON ``group_data`` column; ``{}`` on any failure."""
        if not column:
            return {}

        try:
            return json.loads(column) or {}
        except TypeError:
            return {}

    @hybrid_property
    def description_safe(self):
        """HTML-escaped group description, safe to render in templates."""
        from rhodecode.lib import helpers as h
        return h.escape(self.user_group_description)

    @hybrid_property
    def group_data(self):
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def _load_sync(cls, group_data):
        # external-sync source is stored under 'extern_type'; None when unset
        if group_data:
            return group_data.get('extern_type')

    @property
    def sync(self):
        """External synchronization source of this group, or ``None``."""
        return self._load_sync(self.group_data)

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Fetch a user group by name, optionally case-insensitive/cached."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        """Fetch a user group by primary key; ``None`` for falsy ids."""
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for user groups: owner row, super-admin rows, direct
        user grants and (optionally) members of permitted user groups.
        """
        _admin_perm = 'usergroup.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                # (bug fix: guard owner_row -- it is empty when with_owner=False,
                # which previously raised IndexError here)
                if owner_row and usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            # (same owner_row guard as above)
            if (owner_row and usr.user_id == owner_row[0].user_id) \
                    or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        """User groups granted a permission on this group, sorted for display."""
        q = UserGroupUserGroupToPerm.query()\
            .filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.user_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.user_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        """Collect default permission entries via PermissionModel."""
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        Common function for generating API data for this user group.

        :param with_group_members: include per-member user API data
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.
        """
        user_group = self
        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
            'sync': user_group.sync,
            'owner_email': user_group.user.email,
        }

        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
1509 1520
1510 1521
class UserGroupMember(Base, BaseModel):
    """Association row linking a user to a user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # gr_id/u_id default to '' (not None) -- presumably for legacy form
        # handling; verify before changing
        self.users_group_id = gr_id
        self.user_id = u_id
1527 1538
1528 1539
class RepositoryField(Base, BaseModel):
    """User-defined extra metadata field attached to a repository."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        """Field key with the form prefix applied, e.g. ``ex_my_field``.

        Consistency fix: use :attr:`PREFIX` instead of a hard-coded 'ex_' so
        this can never drift from :meth:`un_prefix_key`.
        """
        return '{}{}'.format(self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        """Strip :attr:`PREFIX` from ``key`` when present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for ``key`` on ``repo``, or ``None``."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1565 1576
1566 1577
class Repository(Base, BaseModel):
    """A single VCS repository (git/hg/svn) known to the system."""
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # templates for generated clone URLs
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # repo creation lifecycle states
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # sources of a repository lock
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    _repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    # remote URIs are stored encrypted at rest
    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # stored as '<rev_type>:<rev>'; exposed via the landing_rev property
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    # self-referential FK: the repository this one was forked from
    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration',
                                cascade="all, delete, delete-orphan")

    # auth tokens scoped to this single repository
    scoped_tokens = relationship('UserApiKeys', cascade="all")
1674 1685
    def __unicode__(self):
        # debug representation: <Repository('<id>:<name>')>
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
                                   safe_unicode(self.repo_name))
1678 1689
    @hybrid_property
    def description_safe(self):
        """Repository description HTML-escaped for safe template rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)
1683 1694
    @hybrid_property
    def landing_rev(self):
        """
        Landing revision as a ``[rev_type, rev]`` pair.

        Parses the stored ``<rev_type>:<rev>`` string; a legacy value with no
        type prefix is treated as type ``rev``. Returns ``[None, None]`` when
        nothing is stored.
        """
        # always should return [rev_type, rev]
        if self._landing_revision:
            _rev_info = self._landing_revision.split(':')
            if len(_rev_info) < 2:
                _rev_info.insert(0, 'rev')
            return [_rev_info[0], _rev_info[1]]
        return [None, None]

    @landing_rev.setter
    def landing_rev(self, val):
        """Set landing revision; ``val`` must be ``<rev_type>:<rev>``.

        :raises ValueError: when ``val`` has no ``:`` delimiter.
        """
        if ':' not in val:
            raise ValueError('value must be delimited with `:` and consist '
                             'of <rev_type>:<rev>, got %s instead' % val)
        self._landing_revision = val
1700 1711
    @hybrid_property
    def locked(self):
        """
        Lock state as ``(user_id, lock_time, reason)``.

        NOTE(review): returns a *tuple* when locked but a *list*
        ``[None, None, None]`` when unlocked — callers only index into it, so
        the asymmetry is harmless, but don't compare against a literal.
        """
        if self._locked:
            user_id, timelocked, reason = self._locked.split(':')
            lock_values = int(user_id), timelocked, reason
        else:
            lock_values = [None, None, None]
        return lock_values

    @locked.setter
    def locked(self, val):
        # store as 'user_id:time:reason'; any falsy or non-sequence value
        # clears the lock
        if val and isinstance(val, (list, tuple)):
            self._locked = ':'.join(map(str, val))
        else:
            self._locked = None
1716 1727
    @hybrid_property
    def changeset_cache(self):
        """
        Cached last-commit data as a dict, deserialized from the JSON column.

        Falls back to an ``EmptyCommit`` payload when the cache is unset or
        cannot be parsed, so callers always get a commit-shaped dict.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not self._changeset_cache:
            return dummy
        try:
            return json.loads(self._changeset_cache)
        except TypeError:
            return dummy
        except Exception:
            # corrupted cache payload: log and degrade to the dummy value
            log.error(traceback.format_exc())
            return dummy

    @changeset_cache.setter
    def changeset_cache(self, val):
        # best-effort serialization; a failure is logged, not raised
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())
1737 1748
    @hybrid_property
    def repo_name(self):
        return self._repo_name

    @repo_name.setter
    def repo_name(self, value):
        # keep the sha1 name-hash column in sync with the name
        self._repo_name = value
        self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1746 1757
1747 1758 @classmethod
1748 1759 def normalize_repo_name(cls, repo_name):
1749 1760 """
1750 1761 Normalizes os specific repo_name to the format internally stored inside
1751 1762 database using URL_SEP
1752 1763
1753 1764 :param cls:
1754 1765 :param repo_name:
1755 1766 """
1756 1767 return cls.NAME_SEP.join(repo_name.split(os.sep))
1757 1768
    @classmethod
    def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
        """
        Fetch a repository by its exact name.

        :param cache: use the short SQL result cache for the lookup
        :param identity_cache: prefer the session identity-map based cache;
            only consulted when ``cache`` is also True
        :return: Repository instance or None
        """
        session = Session()
        q = session.query(cls).filter(cls.repo_name == repo_name)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'repo_name', repo_name)
                if val:
                    return val
            else:
                cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
1774 1785
    @classmethod
    def get_by_id_or_repo_name(cls, repoid):
        """
        Resolve a repository from either a numeric id or a name.

        :param repoid: int/long id, or string repo name (Python 2 ``long``)
        :return: Repository instance or None
        """
        if isinstance(repoid, (int, long)):
            try:
                repo = cls.get(repoid)
            except ValueError:
                repo = None
        else:
            repo = cls.get_by_repo_name(repoid)
        return repo
1785 1796
    @classmethod
    def get_by_full_path(cls, repo_full_path):
        """Resolve a repository from an absolute filesystem path."""
        # strip the storage base path, then normalize OS separators
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
        repo_name = cls.normalize_repo_name(repo_name)
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))

    @classmethod
    def get_repo_forks(cls, repo_id):
        """Return a query of repositories forked from ``repo_id``."""
        return cls.query().filter(Repository.fork_id == repo_id)
1795 1806
    @classmethod
    def base_path(cls):
        """
        Returns base path when all repos are stored

        :param cls:
        """
        # the storage root is kept in the RhodeCodeUi table under NAME_SEP
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1807 1818
    @classmethod
    def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
                      case_insensitive=True, archived=False):
        """
        List repositories, optionally filtered by owner and/or group.

        :param user_id: filter by owner; ``Optional(None)`` means no filter
        :param group_id: filter by repo group; ``Optional(None)`` means no filter
        :param case_insensitive: sort ignoring case when True
        :param archived: include archived repositories when True
        :return: list of Repository instances ordered by name
        """
        q = Repository.query()

        if not archived:
            q = q.filter(Repository.archived.isnot(true()))

        # Optional is a sentinel: only filter when a real value was passed
        if not isinstance(user_id, Optional):
            q = q.filter(Repository.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(Repository.group_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(Repository.repo_name))
        else:
            q = q.order_by(Repository.repo_name)

        return q.all()
1828 1839
    @property
    def forks(self):
        """
        Return forks of this repo
        """
        return Repository.get_repo_forks(self.repo_id)

    @property
    def parent(self):
        """
        Returns fork parent
        """
        return self.fork

    @property
    def just_name(self):
        # last path segment of the repo name, e.g. 'group/sub/repo' -> 'repo'
        return self.repo_name.split(self.NAME_SEP)[-1]
1846 1857
1847 1858 @property
1848 1859 def groups_with_parents(self):
1849 1860 groups = []
1850 1861 if self.group is None:
1851 1862 return groups
1852 1863
1853 1864 cur_gr = self.group
1854 1865 groups.insert(0, cur_gr)
1855 1866 while 1:
1856 1867 gr = getattr(cur_gr, 'parent_group', None)
1857 1868 cur_gr = cur_gr.parent_group
1858 1869 if gr is None:
1859 1870 break
1860 1871 groups.insert(0, gr)
1861 1872
1862 1873 return groups
1863 1874
1864 1875 @property
1865 1876 def groups_and_repo(self):
1866 1877 return self.groups_with_parents, self
1867 1878
    @LazyProperty
    def repo_path(self):
        """
        Returns base full path for that repository means where it actually
        exists on a filesystem
        """
        q = Session().query(RhodeCodeUi).filter(
            RhodeCodeUi.ui_key == self.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value

    @property
    def repo_full_path(self):
        """Absolute filesystem path of this repository."""
        p = [self.repo_path]
        # we need to split the name by / since this is how we store the
        # names in the database, but that eventually needs to be converted
        # into a valid system path
        p += self.repo_name.split(self.NAME_SEP)
        return os.path.join(*map(safe_unicode, p))
1887 1898
    @property
    def cache_keys(self):
        """
        Returns associated cache keys for that repo
        """
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        return CacheKey.query()\
            .filter(CacheKey.cache_args == invalidation_namespace)\
            .order_by(CacheKey.cache_key)\
            .all()
1899 1910
    @property
    def cached_diffs_relative_dir(self):
        """
        Return a relative to the repository store path of cached diffs
        used for safe display for users, who shouldn't know the absolute store
        path
        """
        return os.path.join(
            os.path.dirname(self.repo_name),
            self.cached_diffs_dir.split(os.path.sep)[-1])

    @property
    def cached_diffs_dir(self):
        """Absolute path of the shadow diff-cache directory for this repo."""
        path = self.repo_full_path
        # the cache dir lives next to the repo, named after the repo id
        return os.path.join(
            os.path.dirname(path),
            '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1917 1928
1918 1929 def cached_diffs(self):
1919 1930 diff_cache_dir = self.cached_diffs_dir
1920 1931 if os.path.isdir(diff_cache_dir):
1921 1932 return os.listdir(diff_cache_dir)
1922 1933 return []
1923 1934
1924 1935 def shadow_repos(self):
1925 1936 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1926 1937 return [
1927 1938 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1928 1939 if x.startswith(shadow_repos_pattern)]
1929 1940
    def get_new_name(self, repo_name):
        """
        returns new full repository name based on assigned group and new new

        :param repo_name: new (group-less) name for this repository
        :return: full name including the current group path prefix
        """
        path_prefix = self.group.full_path_splitted if self.group else []
        return self.NAME_SEP.join(path_prefix + [repo_name])

    @property
    def _config(self):
        """
        Returns db based config object.
        """
        from rhodecode.lib.utils import make_db_config
        return make_db_config(clear_session=False, repo=self)
1946 1957
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repositories

        :param with_admins: include a row for every super-admin
        :param with_owner: include a row for the repository owner
        :param expand_from_user_groups: also append rows for the members of
            user groups that have permissions on this repository
        :return: list of AttributeDict rows: super-admins, owner, direct user
            permissions (sorted), then expanded user-group members

        NOTE(review): ``owner_row[0]`` is indexed below even when
        ``with_owner=False`` leaves ``owner_row`` empty — callers apparently
        never pass that combination; confirm before relying on it.
        """
        _admin_perm = 'repository.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            usr.permission_id = None
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    usr.permission_id = None
                    super_admin_rows.append(usr)

        q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
        q = q.options(joinedload(UserRepoToPerm.repository),
                      joinedload(UserRepoToPerm.user),
                      joinedload(UserRepoToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            # also check if this permission is maybe used by branch_permissions
            if _usr.branch_perm_entry:
                usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]

            usr.permission = _usr.permission.permission_name
            usr.permission_id = _usr.repo_to_perm_id
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows
2013 2024
    def permission_user_groups(self, with_members=True):
        """
        User groups with permissions on this repository.

        :param with_members: when True, each row carries a ``members`` list of
            the group's user dicts
        :return: AttributeDict rows sorted via ``display_user_group_sort``
        """
        q = UserGroupRepoToPerm.query()\
            .filter(UserGroupRepoToPerm.repository == self)
        q = q.options(joinedload(UserGroupRepoToPerm.repository),
                      joinedload(UserGroupRepoToPerm.users_group),
                      joinedload(UserGroupRepoToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.users_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.users_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows
2032 2043
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        :return: dict of repository attributes used by the JSON-RPC API,
            optionally extended with the custom extra fields when the
            ``rhodecode_repository_fields`` setting is enabled.
        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # unpack the lock triple once; each part may be None when unlocked
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
2081 2092
    @classmethod
    def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
        """
        Lock ``repo`` for ``user_id`` and commit immediately.

        :param lock_time: epoch seconds; defaults to now
        :param lock_reason: defaults to ``cls.LOCK_AUTOMATIC``
        """
        if not lock_time:
            lock_time = time.time()
        if not lock_reason:
            lock_reason = cls.LOCK_AUTOMATIC
        repo.locked = [user_id, lock_time, lock_reason]
        Session().add(repo)
        Session().commit()

    @classmethod
    def unlock(cls, repo):
        """Clear the lock on ``repo`` and commit immediately."""
        repo.locked = None
        Session().add(repo)
        Session().commit()

    @classmethod
    def getlock(cls, repo):
        """Return the ``(user_id, time, reason)`` lock state of ``repo``."""
        return repo.locked
2101 2112
2102 2113 def is_user_lock(self, user_id):
2103 2114 if self.lock[0]:
2104 2115 lock_user_id = safe_int(self.lock[0])
2105 2116 user_id = safe_int(user_id)
2106 2117 # both are ints, and they are equal
2107 2118 return all([lock_user_id, user_id]) and lock_user_id == user_id
2108 2119
2109 2120 return False
2110 2121
    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository, if locking is enabled and lock is
        present returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states None (do nothing) True, make lock
        False release lock, This value is later propagated to hooks, which
        do the locking. Think about this as signals passed to hooks what to do.

        :param action: one of 'push' or 'pull'
        :param user_id: id of the user performing the action
        :param only_when_enabled: when False, evaluate locking even if the
            repo has locking disabled
        :raises ValueError: for any other ``action`` value
        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state
        make_lock = None
        repo = self
        user = User.get(user_id)

        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked !, if it is compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked, ban with
                    # code defined in settings (default is 423 HTTP Locked) !
                    # NOTE(review): the log line reports the *acting* user,
                    # not the lock owner
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

        else:
            log.debug('Repository %s do not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            # NOTE(review): the message says "read" but the check above
            # requires write/admin
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least read permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
2173 2184
2174 2185 @property
2175 2186 def last_db_change(self):
2176 2187 return self.updated_on
2177 2188
2178 2189 @property
2179 2190 def clone_uri_hidden(self):
2180 2191 clone_uri = self.clone_uri
2181 2192 if clone_uri:
2182 2193 import urlobject
2183 2194 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2184 2195 if url_obj.password:
2185 2196 clone_uri = url_obj.with_password('*****')
2186 2197 return clone_uri
2187 2198
2188 2199 @property
2189 2200 def push_uri_hidden(self):
2190 2201 push_uri = self.push_uri
2191 2202 if push_uri:
2192 2203 import urlobject
2193 2204 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2194 2205 if url_obj.password:
2195 2206 push_uri = url_obj.with_password('*****')
2196 2207 return push_uri
2197 2208
    def clone_url(self, **override):
        """
        Render the clone URL for this repository.

        Recognized ``override`` keys (consumed, not passed through):
        ``with_id`` — use the id-based URI template; ``uri_tmpl`` — use an
        explicit template; ``ssh`` — use the SSH template. Remaining kwargs
        are forwarded to ``get_clone_url``.
        """
        from rhodecode.model.settings import SettingsModel

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        ssh = False
        if 'ssh' in override:
            ssh = True
            del override['ssh']

        # we didn't override our tmpl from **overrides
        if not uri_tmpl:
            rc_config = SettingsModel().get_all_settings(cache=True)
            if ssh:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
            else:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI

        request = get_current_request()
        return get_clone_url(request=request,
                             uri_tmpl=uri_tmpl,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id, **override)
2230 2241
    def set_state(self, state):
        """Set the repository state column; caller is expected to commit."""
        self.repo_state = state
        Session().add(self)
    #==========================================================================
    # SCM PROPERTIES
    #==========================================================================
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """Safely fetch a commit by hash id or numeric index from the SCM."""
        return get_commit_safe(
            self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)

    def get_changeset(self, rev=None, pre_load=None):
        """Deprecated alias of :meth:`get_commit`; maps ``rev`` by type."""
        warnings.warn("Use get_commit", DeprecationWarning)
        commit_id = None
        commit_idx = None
        # string revs are treated as commit ids, everything else as an index
        if isinstance(rev, compat.string_types):
            commit_id = rev
        else:
            commit_idx = rev
        return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
                               pre_load=pre_load)
2252 2263
    def get_landing_commit(self):
        """
        Returns landing commit, or if that doesn't exist returns the tip
        """
        _rev_type, _rev = self.landing_rev
        commit = self.get_commit(_rev)
        if isinstance(commit, EmptyCommit):
            # landing rev points nowhere: fall back to the tip
            return self.get_commit()
        return commit
2262 2273
    def update_commit_cache(self, cs_cache=None, config=None):
        """
        Update cache of last changeset for repository, keys should be::

            short_id
            raw_id
            revision
            parents
            message
            date
            author

        :param cs_cache: commit dict / BaseChangeset to store; when None the
            latest commit is loaded from the (uncached) scm instance
        :param config: optional config passed to ``scm_instance``
        """
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        if cs_cache is None:
            # use no-cache version here
            scm_repo = self.scm_instance(cache=False, config=config)

            empty = not scm_repo or scm_repo.is_empty()
            if not empty:
                cs_cache = scm_repo.get_commit(
                    pre_load=["author", "date", "message", "parents"])
            else:
                cs_cache = EmptyCommit()

        if isinstance(cs_cache, BaseChangeset):
            cs_cache = cs_cache.__json__()

        def is_outdated(new_cs_cache):
            # stale when either the hash or the revision number changed
            if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
                    new_cs_cache['revision'] != self.changeset_cache['revision']):
                return True
            return False

        # check if we have maybe already latest cached revision
        if is_outdated(cs_cache) or not self.changeset_cache:
            _default = datetime.datetime.utcnow()
            last_change = cs_cache.get('date') or _default
            if self.updated_on and self.updated_on > last_change:
                # we check if last update is newer than the new value
                # if yes, we use the current timestamp instead. Imagine you get
                # old commit pushed 1y ago, we'd set last update 1y to ago.
                last_change = _default
            log.debug('updated repo %s with new cs cache %s',
                      self.repo_name, cs_cache)
            self.updated_on = last_change
            self.changeset_cache = cs_cache
            Session().add(self)
            Session().commit()
        else:
            log.debug('Skipping update_commit_cache for repo:`%s` '
                      'commit already with latest changes', self.repo_name)
2316 2327
    @property
    def tip(self):
        """Tip commit of the repository."""
        return self.get_commit('tip')

    @property
    def author(self):
        """Author of the tip commit."""
        return self.tip.author

    @property
    def last_change(self):
        """Last change timestamp as reported by the SCM backend."""
        return self.scm_instance().last_change
2328 2339
2329 2340 def get_comments(self, revisions=None):
2330 2341 """
2331 2342 Returns comments for this repository grouped by revisions
2332 2343
2333 2344 :param revisions: filter query by revisions only
2334 2345 """
2335 2346 cmts = ChangesetComment.query()\
2336 2347 .filter(ChangesetComment.repo == self)
2337 2348 if revisions:
2338 2349 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2339 2350 grouped = collections.defaultdict(list)
2340 2351 for cmt in cmts.all():
2341 2352 grouped[cmt.revision].append(cmt)
2342 2353 return grouped
2343 2354
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        :return: dict mapping revision -> [status, status_label, pr_id,
            pr_target_repo_name]
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # explicit statuses override the implicit under-review entries above
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
2385 2396
    # ==========================================================================
    # SCM CACHE INSTANCE
    # ==========================================================================

    def scm_instance(self, **kwargs):
        """
        Return the VCS backend instance for this repository.

        :param config: optional config; passing one bypasses the cache
            (currently only used by repo2dbmapper)
        :param cache: explicit per-call cache control; when None the global
            ``vcs_full_cache`` setting decides
        """
        import rhodecode

        # Passing a config will not hit the cache currently only used
        # for repo2dbmapper
        config = kwargs.pop('config', None)
        cache = kwargs.pop('cache', None)
        full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
        # if cache is NOT defined use default global, else we have a full
        # control over cache behaviour
        if cache is None and full_cache and not config:
            return self._get_instance_cached()
        return self._get_instance(cache=bool(cache), config=config)
2403 2414
    def _get_instance_cached(self):
        """
        Return the VCS instance through the long-term dogpile cache region,
        honoring the repo's invalidation namespace.
        """
        from rhodecode.lib import rc_cache

        cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def get_instance_cached(repo_id, context_id):
            return self._get_instance()

        # we must use thread scoped cache here,
        # because each thread of gevent needs it's own not shared connection and cache
        # we also alter `args` so the cache key is individual for every green thread.
        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
            thread_scoped=True)
        with inv_context_manager as invalidation_context:
            args = (self.repo_id, inv_context_manager.cache_key)
            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                instance = get_instance_cached.refresh(*args)
            else:
                instance = get_instance_cached(*args)

            log.debug(
                'Repo instance fetched in %.3fs', inv_context_manager.compute_time)
            return instance
2433 2444
    def _get_instance(self, cache=True, config=None):
        """
        Construct the raw VCS backend instance for this repository.

        :param cache: controls the vcs.remote cache on the wire
        :param config: db-based config; defaults to ``self._config``
        """
        config = config or self._config
        custom_wire = {
            'cache': cache  # controls the vcs.remote cache
        }
        repo = get_vcs_instance(
            repo_path=safe_str(self.repo_full_path),
            config=config,
            with_wire=custom_wire,
            create=False,
            _vcs_alias=self.repo_type)

        return repo
2447 2458
    def __json__(self):
        # minimal JSON form; full API payloads come from get_api_data()
        return {'landing_rev': self.landing_rev}

    def get_dict(self):
        """Dict of column values, with ``_repo_name`` renamed to ``repo_name``."""

        # Since we transformed `repo_name` to a hybrid property, we need to
        # keep compatibility with the code which uses `repo_name` field.

        result = super(Repository, self).get_dict()
        result['repo_name'] = result.pop('_repo_name', None)
        return result
2459 2470
2460 2471
class RepoGroup(Base, BaseModel):
    """Nestable grouping container for repositories."""
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # exposed via the `group_name` hybrid property, which keeps the hash column in sync
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    # self-referential FK enabling nested groups
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    # marks auto-created personal groups
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration', cascade="all, delete, delete-orphan")
2486 2498
2487 2499 def __init__(self, group_name='', parent_group=None):
2488 2500 self.group_name = group_name
2489 2501 self.parent_group = parent_group
2490 2502
2491 2503 def __unicode__(self):
2492 2504 return u"<%s('id:%s:%s')>" % (
2493 2505 self.__class__.__name__, self.group_id, self.group_name)
2494 2506
2507 @hybrid_property
2508 def group_name(self):
2509 return self._group_name
2510
2511 @group_name.setter
2512 def group_name(self, value):
2513 self._group_name = value
2514 self.group_name_hash = self.hash_repo_group_name(value)
2515
2495 2516 @validates('group_parent_id')
2496 2517 def validate_group_parent_id(self, key, val):
2497 2518 """
2498 2519 Check cycle references for a parent group to self
2499 2520 """
2500 2521 if self.group_id and val:
2501 2522 assert val != self.group_id
2502 2523
2503 2524 return val
2504 2525
2505 2526 @hybrid_property
2506 2527 def description_safe(self):
2507 2528 from rhodecode.lib import helpers as h
2508 2529 return h.escape(self.group_description)
2509 2530
2510 2531 @classmethod
2532 def hash_repo_group_name(cls, repo_group_name):
2533 val = remove_formatting(repo_group_name)
2534 val = safe_str(val).lower()
2535 chars = []
2536 for c in val:
2537 if c not in string.ascii_letters:
2538 c = str(ord(c))
2539 chars.append(c)
2540
2541 return ''.join(chars)
2542
2543 @classmethod
2511 2544 def _generate_choice(cls, repo_group):
2512 2545 from webhelpers.html import literal as _literal
2513 2546 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2514 2547 return repo_group.group_id, _name(repo_group.full_path_splitted)
2515 2548
2516 2549 @classmethod
2517 2550 def groups_choices(cls, groups=None, show_empty_group=True):
2518 2551 if not groups:
2519 2552 groups = cls.query().all()
2520 2553
2521 2554 repo_groups = []
2522 2555 if show_empty_group:
2523 2556 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2524 2557
2525 2558 repo_groups.extend([cls._generate_choice(x) for x in groups])
2526 2559
2527 2560 repo_groups = sorted(
2528 2561 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2529 2562 return repo_groups
2530 2563
2531 2564 @classmethod
2532 2565 def url_sep(cls):
2533 2566 return URL_SEP
2534 2567
2535 2568 @classmethod
2536 2569 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2537 2570 if case_insensitive:
2538 2571 gr = cls.query().filter(func.lower(cls.group_name)
2539 2572 == func.lower(group_name))
2540 2573 else:
2541 2574 gr = cls.query().filter(cls.group_name == group_name)
2542 2575 if cache:
2543 2576 name_key = _hash_key(group_name)
2544 2577 gr = gr.options(
2545 2578 FromCache("sql_cache_short", "get_group_%s" % name_key))
2546 2579 return gr.scalar()
2547 2580
2548 2581 @classmethod
2549 2582 def get_user_personal_repo_group(cls, user_id):
2550 2583 user = User.get(user_id)
2551 2584 if user.username == User.DEFAULT_USER:
2552 2585 return None
2553 2586
2554 2587 return cls.query()\
2555 2588 .filter(cls.personal == true()) \
2556 2589 .filter(cls.user == user) \
2557 2590 .order_by(cls.group_id.asc()) \
2558 2591 .first()
2559 2592
2560 2593 @classmethod
2561 2594 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2562 2595 case_insensitive=True):
2563 2596 q = RepoGroup.query()
2564 2597
2565 2598 if not isinstance(user_id, Optional):
2566 2599 q = q.filter(RepoGroup.user_id == user_id)
2567 2600
2568 2601 if not isinstance(group_id, Optional):
2569 2602 q = q.filter(RepoGroup.group_parent_id == group_id)
2570 2603
2571 2604 if case_insensitive:
2572 2605 q = q.order_by(func.lower(RepoGroup.group_name))
2573 2606 else:
2574 2607 q = q.order_by(RepoGroup.group_name)
2575 2608 return q.all()
2576 2609
2577 2610 @property
2578 2611 def parents(self):
2579 2612 parents_recursion_limit = 10
2580 2613 groups = []
2581 2614 if self.parent_group is None:
2582 2615 return groups
2583 2616 cur_gr = self.parent_group
2584 2617 groups.insert(0, cur_gr)
2585 2618 cnt = 0
2586 2619 while 1:
2587 2620 cnt += 1
2588 2621 gr = getattr(cur_gr, 'parent_group', None)
2589 2622 cur_gr = cur_gr.parent_group
2590 2623 if gr is None:
2591 2624 break
2592 2625 if cnt == parents_recursion_limit:
2593 2626 # this will prevent accidental infinit loops
2594 2627 log.error('more than %s parents found for group %s, stopping '
2595 2628 'recursive parent fetching', parents_recursion_limit, self)
2596 2629 break
2597 2630
2598 2631 groups.insert(0, gr)
2599 2632 return groups
2600 2633
2601 2634 @property
2602 2635 def last_db_change(self):
2603 2636 return self.updated_on
2604 2637
2605 2638 @property
2606 2639 def children(self):
2607 2640 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2608 2641
2609 2642 @property
2610 2643 def name(self):
2611 2644 return self.group_name.split(RepoGroup.url_sep())[-1]
2612 2645
2613 2646 @property
2614 2647 def full_path(self):
2615 2648 return self.group_name
2616 2649
2617 2650 @property
2618 2651 def full_path_splitted(self):
2619 2652 return self.group_name.split(RepoGroup.url_sep())
2620 2653
2621 2654 @property
2622 2655 def repositories(self):
2623 2656 return Repository.query()\
2624 2657 .filter(Repository.group == self)\
2625 2658 .order_by(Repository.repo_name)
2626 2659
2627 2660 @property
2628 2661 def repositories_recursive_count(self):
2629 2662 cnt = self.repositories.count()
2630 2663
2631 2664 def children_count(group):
2632 2665 cnt = 0
2633 2666 for child in group.children:
2634 2667 cnt += child.repositories.count()
2635 2668 cnt += children_count(child)
2636 2669 return cnt
2637 2670
2638 2671 return cnt + children_count(self)
2639 2672
2640 2673 def _recursive_objects(self, include_repos=True):
2641 2674 all_ = []
2642 2675
2643 2676 def _get_members(root_gr):
2644 2677 if include_repos:
2645 2678 for r in root_gr.repositories:
2646 2679 all_.append(r)
2647 2680 childs = root_gr.children.all()
2648 2681 if childs:
2649 2682 for gr in childs:
2650 2683 all_.append(gr)
2651 2684 _get_members(gr)
2652 2685
2653 2686 _get_members(self)
2654 2687 return [self] + all_
2655 2688
2656 2689 def recursive_groups_and_repos(self):
2657 2690 """
2658 2691 Recursive return all groups, with repositories in those groups
2659 2692 """
2660 2693 return self._recursive_objects()
2661 2694
2662 2695 def recursive_groups(self):
2663 2696 """
2664 2697 Returns all children groups for this group including children of children
2665 2698 """
2666 2699 return self._recursive_objects(include_repos=False)
2667 2700
2668 2701 def get_new_name(self, group_name):
2669 2702 """
2670 2703 returns new full group name based on parent and new name
2671 2704
2672 2705 :param group_name:
2673 2706 """
2674 2707 path_prefix = (self.parent_group.full_path_splitted if
2675 2708 self.parent_group else [])
2676 2709 return RepoGroup.url_sep().join(path_prefix + [group_name])
2677 2710
2678 2711 def permissions(self, with_admins=True, with_owner=True,
2679 2712 expand_from_user_groups=False):
2680 2713 """
2681 2714 Permissions for repository groups
2682 2715 """
2683 2716 _admin_perm = 'group.admin'
2684 2717
2685 2718 owner_row = []
2686 2719 if with_owner:
2687 2720 usr = AttributeDict(self.user.get_dict())
2688 2721 usr.owner_row = True
2689 2722 usr.permission = _admin_perm
2690 2723 owner_row.append(usr)
2691 2724
2692 2725 super_admin_ids = []
2693 2726 super_admin_rows = []
2694 2727 if with_admins:
2695 2728 for usr in User.get_all_super_admins():
2696 2729 super_admin_ids.append(usr.user_id)
2697 2730 # if this admin is also owner, don't double the record
2698 2731 if usr.user_id == owner_row[0].user_id:
2699 2732 owner_row[0].admin_row = True
2700 2733 else:
2701 2734 usr = AttributeDict(usr.get_dict())
2702 2735 usr.admin_row = True
2703 2736 usr.permission = _admin_perm
2704 2737 super_admin_rows.append(usr)
2705 2738
2706 2739 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2707 2740 q = q.options(joinedload(UserRepoGroupToPerm.group),
2708 2741 joinedload(UserRepoGroupToPerm.user),
2709 2742 joinedload(UserRepoGroupToPerm.permission),)
2710 2743
2711 2744 # get owners and admins and permissions. We do a trick of re-writing
2712 2745 # objects from sqlalchemy to named-tuples due to sqlalchemy session
2713 2746 # has a global reference and changing one object propagates to all
2714 2747 # others. This means if admin is also an owner admin_row that change
2715 2748 # would propagate to both objects
2716 2749 perm_rows = []
2717 2750 for _usr in q.all():
2718 2751 usr = AttributeDict(_usr.user.get_dict())
2719 2752 # if this user is also owner/admin, mark as duplicate record
2720 2753 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2721 2754 usr.duplicate_perm = True
2722 2755 usr.permission = _usr.permission.permission_name
2723 2756 perm_rows.append(usr)
2724 2757
2725 2758 # filter the perm rows by 'default' first and then sort them by
2726 2759 # admin,write,read,none permissions sorted again alphabetically in
2727 2760 # each group
2728 2761 perm_rows = sorted(perm_rows, key=display_user_sort)
2729 2762
2730 2763 user_groups_rows = []
2731 2764 if expand_from_user_groups:
2732 2765 for ug in self.permission_user_groups(with_members=True):
2733 2766 for user_data in ug.members:
2734 2767 user_groups_rows.append(user_data)
2735 2768
2736 2769 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2737 2770
2738 2771 def permission_user_groups(self, with_members=False):
2739 2772 q = UserGroupRepoGroupToPerm.query()\
2740 2773 .filter(UserGroupRepoGroupToPerm.group == self)
2741 2774 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2742 2775 joinedload(UserGroupRepoGroupToPerm.users_group),
2743 2776 joinedload(UserGroupRepoGroupToPerm.permission),)
2744 2777
2745 2778 perm_rows = []
2746 2779 for _user_group in q.all():
2747 2780 entry = AttributeDict(_user_group.users_group.get_dict())
2748 2781 entry.permission = _user_group.permission.permission_name
2749 2782 if with_members:
2750 2783 entry.members = [x.user.get_dict()
2751 2784 for x in _user_group.users_group.members]
2752 2785 perm_rows.append(entry)
2753 2786
2754 2787 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2755 2788 return perm_rows
2756 2789
2757 2790 def get_api_data(self):
2758 2791 """
2759 2792 Common function for generating api data
2760 2793
2761 2794 """
2762 2795 group = self
2763 2796 data = {
2764 2797 'group_id': group.group_id,
2765 2798 'group_name': group.group_name,
2766 2799 'group_description': group.description_safe,
2767 2800 'parent_group': group.parent_group.group_name if group.parent_group else None,
2768 2801 'repositories': [x.repo_name for x in group.repositories],
2769 2802 'owner': group.user.username,
2770 2803 }
2771 2804 return data
2772 2805
2806 def get_dict(self):
2807 # Since we transformed `group_name` to a hybrid property, we need to
2808 # keep compatibility with the code which uses `group_name` field.
2809 result = super(RepoGroup, self).get_dict()
2810 result['group_name'] = result.pop('_group_name', None)
2811 return result
2812
2773 2813
class Permission(Base, BaseModel):
    """
    Catalogue of all permission names known to the system (table
    ``permissions``), the default-user permission set created at setup,
    the weight table used to pick the strongest permission, and query
    helpers resolving a user's permissions either granted directly or
    inherited through active user-group membership.
    """
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        base_table_args,
    )

    # (permission_name, translated description) pairs seeded into the table
    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('branch.none', _('Branch no permissions')),
        ('branch.merge', _('Branch access by web merge')),
        ('branch.push', _('Branch access by push')),
        ('branch.push_force', _('Branch access by push with force')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user, created on
    # system setup
    DEFAULT_USER_PERMISSIONS = [
        # object perms
        'repository.read',
        'group.read',
        'usergroup.read',
        # branch, for backward compat we need same value as before so forced pushed
        'branch.push_force',
        # global
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # Weight defines which permissions are more important.
    # The higher the number, the more important the permission.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'branch.none': 0,
        'branch.merge': 1,
        'branch.push': 3,
        'branch.push_force': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        """Return the Permission row whose name equals *key* (or None)."""
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        """
        (UserRepoToPerm, Repository, Permission) tuples for permissions
        granted directly to *user_id*, optionally limited to *repo_id*.
        """
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms(cls, user_id, repo_id=None):
        """
        Branch-level permission rules granted directly to *user_id*,
        ordered by rule_order; optionally limited to *repo_id*.
        """
        q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
            .join(
                Permission,
                UserToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserRepoToPerm,
                UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
            .filter(UserRepoToPerm.user_id == user_id)

        if repo_id:
            q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Repository permissions *user_id* inherits through membership in
        active user groups; optionally limited to *repo_id*.
        """
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Branch-level permission rules *user_id* inherits through active
        user groups, ordered by rule_order; optionally limited to *repo_id*.
        """
        q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
            .join(
                Permission,
                UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserGroupRepoToPerm,
                UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())

        if repo_id:
            q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_group_perms(cls, user_id, repo_group_id=None):
        """
        Repo-group permissions granted directly to *user_id*, optionally
        limited to *repo_group_id*.
        """
        q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserRepoGroupToPerm.permission_id == Permission.permission_id)\
            .join(
                RepoGroup,
                UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .filter(UserRepoGroupToPerm.user_id == user_id)
        if repo_group_id:
            q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        """
        Repo-group permissions *user_id* inherits through membership in
        active user groups; optionally limited to *repo_group_id*.
        """
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms(cls, user_id, user_group_id=None):
        """
        User-group permissions granted directly to *user_id*, optionally
        limited to *user_group_id*.
        """
        q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
            .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
            .filter(UserUserGroupToPerm.user_id == user_id)
        if user_group_id:
            q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        """
        Permissions on user groups that *user_id* inherits through
        membership in other active user groups; the target group is
        aliased to distinguish it from the granting group.
        """
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()
3054 3094
3055 3095
class UserRepoToPerm(Base, BaseModel):
    """Association row granting one user a permission on one repository."""
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    # branch rules hanging off this permission entry; removed together with it
    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete, delete-orphan", lazy='joined')

    @classmethod
    def create(cls, user, repository, permission):
        """Build a new user->repository permission row and add it to the session."""
        entry = cls()
        entry.user = user
        entry.repository = repository
        entry.permission = permission
        Session().add(entry)
        return entry

    def __unicode__(self):
        fmt = u'<%s => %s >'
        return fmt % (self.user, self.repository)
3085 3125
3086 3126
class UserUserGroupToPerm(Base, BaseModel):
    """Association row granting one user a permission on one user group."""
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Build a new user->user-group permission row and add it to the session."""
        entry = cls()
        entry.user = user
        entry.user_group = user_group
        entry.permission = permission
        Session().add(entry)
        return entry

    def __unicode__(self):
        fmt = u'<%s => %s >'
        return fmt % (self.user, self.user_group)
3114 3154
3115 3155
class UserToPerm(Base, BaseModel):
    """Association row granting one user a global (system-wide) permission."""
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # eagerly joined: permission name is almost always read with the row
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
3132 3172
3133 3173
class UserGroupRepoToPerm(Base, BaseModel):
    """Association row granting a user group a permission on a repository."""
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Build a new user-group->repository permission row and add it to the session."""
        entry = cls()
        entry.users_group = users_group
        entry.repository = repository
        entry.permission = permission
        Session().add(entry)
        return entry

    def __unicode__(self):
        fmt = u'<UserGroupRepoToPerm:%s => %s >'
        return fmt % (self.users_group, self.repository)
3162 3202
3163 3203
class UserGroupUserGroupToPerm(Base, BaseModel):
    """Association row granting one user group a permission on another user group."""
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        # a group must never hold a permission on itself
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Build a new group->group permission row and add it to the session."""
        entry = cls()
        entry.target_user_group = target_user_group
        entry.user_group = user_group
        entry.permission = permission
        Session().add(entry)
        return entry

    def __unicode__(self):
        fmt = u'<UserGroupUserGroup:%s => %s >'
        return fmt % (self.target_user_group, self.user_group)
3192 3232
3193 3233
class UserGroupToPerm(Base, BaseModel):
    """Association row granting a user group a global (system-wide) permission."""
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
3207 3247
3208 3248
class UserRepoGroupToPerm(Base, BaseModel):
    """Association row granting one user a permission on one repository group."""
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Build a new user->repo-group permission row and add it to the session."""
        entry = cls()
        entry.user = user
        entry.group = repository_group
        entry.permission = permission
        Session().add(entry)
        return entry
3233 3273
3234 3274
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """
    Association table granting a user group a permission on a repository
    group.
    """
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        # NOTE(review): unlike the sibling *ToPerm tables this constraint
        # does not include permission_id, so only one permission row can
        # exist per (user group, repo group) pair — presumably intentional,
        # verify before changing.
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    # surrogate primary key
    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # user group receiving the permission
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    # repository group the permission applies to
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """
        Create a new permission entry and add it to the current session.
        Committing is left to the caller.

        :param user_group: ``UserGroup`` instance
        :param repository_group: ``RepoGroup`` instance
        :param permission: ``Permission`` instance
        :return: the new, session-pending ``UserGroupRepoGroupToPerm`` object
        """
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3262 3302
3263 3303
class Statistics(Base, BaseModel):
    """
    Pre-computed commit statistics for a repository. One row per
    repository (``repository_id`` is unique); the heavy payloads are
    stored as serialized JSON blobs.
    """
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # unique FK: at most one statistics row per repository
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # revision up to which the statistics were computed
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)
3278 3318
3279 3319
class UserFollowing(Base, BaseModel):
    """
    A "follow" relation from a user to either a repository or another
    user. Exactly one of ``follows_repo_id`` / ``follows_user_id`` is
    expected to be set per row (both columns are nullable); duplicates
    are prevented by the unique constraints.
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # the follower
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    # followed repository (mutually exclusive with follows_user_id)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    # followed user (mutually exclusive with follows_repo_id)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """
        Return an (unexecuted) query for all followings of the given
        repository id.
        """
        return cls.query().filter(cls.follows_repo_id == repo_id)
3302 3342
3303 3343
class CacheKey(Base, BaseModel):
    """
    Cache-invalidation bookkeeping. Each row records a cache key, the
    arguments (namespace) it was registered under, and whether the cached
    value is still considered active.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'
    CACHE_TYPE_README = 'README'
    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
    SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # the full cache key; unique across the table
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    # namespace / arguments the key was registered with (used by set_invalidate)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    # False means the cached value is stale and must be recomputed
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        # new keys always start out inactive (stale) until first use
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key around cache_args; yields (prefix, repo_name, suffix)
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        :param cache_uid: value matched against ``cache_args``
        :param delete: when True, delete the rows instead of just
            flagging them inactive
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                qry.update({"cache_active": False})
                log.debug('cache objects marked as invalid for cache args %s',
                          safe_str(cache_uid))

            Session().commit()
        except Exception:
            # best-effort: log and roll back rather than propagate, so a
            # failed invalidation never breaks the calling request
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """
        Return the ``CacheKey`` row matching *cache_key*, or None.
        """
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None
3394 3434
3395 3435
class ChangesetComment(Base, BaseModel):
    """
    A comment attached either to a commit (``revision`` set) or to a pull
    request (``pull_request_id`` set). Comments can be plain notes or
    TODOs, inline (line/file scoped) or general, and TODOs can be
    resolved by a later comment (self-referential FK).
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    # display_state value marking a comment as outdated by a newer PR version
    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    # commit hash when commenting on a commit; NULL for pull request comments
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    # PR version the comment was made against (NULL = latest/unversioned)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    # line number / file path: both set for inline comments, NULL otherwise
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # renderer used for the comment text (e.g. markup flavor name)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    # comment (TODO) this comment resolves, if any
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision: filter by commit hash (takes precedence)
        :param pull_request_id: filter by pull request id
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions):
        """
        Return the 1-based position of *pr_version* within *versions*,
        or None when it is not present.
        """
        num_versions = [x.pull_request_version_id for x in versions]
        try:
            return num_versions.index(pr_version) +1
        except (IndexError, ValueError):
            return

    @property
    def outdated(self):
        # True when the comment was flagged as outdated by a PR update
        return self.display_state == self.COMMENT_OUTDATED

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return self.outdated and self.pull_request_version_id != version

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            # any versioned comment is "older" than the unversioned head
            return self.pull_request_version_id is not None

        return self.pull_request_version_id < version

    @property
    def resolved(self):
        # the first resolving comment, or None when still unresolved
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # truthy only when both a line number and a file path are present
        return self.line_no and self.f_path

    def get_index_version(self, versions):
        """1-based index of this comment's PR version within *versions*."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Return the dict representation used by the JSON API."""
        comment = self
        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3520 3560
3521 3561
class ChangesetStatus(Base, BaseModel):
    """
    Review status (approved/rejected/...) set by a user on a commit or a
    pull request. ``version`` tracks superseded statuses; the triple
    (repo, revision, version) is unique.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs; order matters for UI listings
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    # optional link to the comment that carried the status change
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    # incremented when a newer status supersedes this one
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[v%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Translate a raw status value into its display label (or None)."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return the dict representation used by the JSON API."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3584 3624
3585 3625
class _SetState(object):
    """
    Context manager that temporarily switches a pull request's state for
    sensitive operations such as pull request update or merge.

    On enter the pull request is moved to the requested state; on exit it
    is restored to its previous state (or to an explicitly supplied
    ``back_state``). Each transition is committed immediately so other
    workers observe it.
    """

    def __init__(self, pull_request, pr_state, back_state=None):
        self._pull_request = pull_request
        # state restored on exit; falls back to the state seen at creation
        self._restore_state = back_state or pull_request.pull_request_state
        self._target_state = pr_state

    def _persist_state(self, state):
        # write the state change and commit right away so it is visible
        # to concurrent requests/workers
        self._pull_request.pull_request_state = state
        Session().add(self._pull_request)
        Session().commit()

    def __enter__(self):
        log.debug('StateLock: entering set state context, setting state to: `%s`',
                  self._target_state)
        self._persist_state(self._target_state)

    def __exit__(self, exc_type, exc_val, exc_tb):
        log.debug('StateLock: exiting set state context, setting state to: `%s`',
                  self._restore_state)
        self._persist_state(self._restore_state)
3610 3650
3611 3651
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.

    This is a declarative mixin shared by ``PullRequest`` and
    ``PullRequestVersion``; columns/relationships that must be re-created
    per subclass use ``@declared_attr``.
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    # available states
    STATE_CREATING = u'creating'
    STATE_UPDATING = u'updating'
    STATE_MERGING = u'merging'
    STATE_CREATED = u'created'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    # markup renderer used for the description text
    description_renderer = Column('description_renderer', Unicode(64), nullable=True)

    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    # transient operational state (see STATE_* constants); managed by _SetState
    pull_request_state = Column("pull_request_state", String(255), nullable=True)

    @declared_attr
    def user_id(cls):
        # author of the pull request
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    # reference string in `type:name:commit_id` form; see source_ref setter
    _source_ref = Column('org_ref', Unicode(255), nullable=False)

    @hybrid_property
    def source_ref(self):
        return self._source_ref

    @source_ref.setter
    def source_ref(self, val):
        # validate the X:Y:Z reference format before storing
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._source_ref = safe_unicode(val)

    _target_ref = Column('other_ref', Unicode(255), nullable=False)

    @hybrid_property
    def target_ref(self):
        return self._target_ref

    @target_ref.setter
    def target_ref(self, val):
        # validate the X:Y:Z reference format before storing
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._target_ref = safe_unicode(val)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    # reference of the merge produced in the shadow repository, if any
    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    merge_rev = Column('merge_rev', String(40), nullable=True)

    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        # serialized form of reviewer_data, used for display/export
        return json.dumps(self.reviewer_data)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template output
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        # stored as a single colon-joined string; exposed as a list
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = ':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        # explicit primaryjoin needed because the FK column keeps its
        # legacy name (org_repo_id) and the attr is declared per subclass
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        # Reference namedtuple parsed from the stored source ref string
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    @staticmethod
    def unicode_to_reference(raw):
        """
        Convert a unicode (or string) to a reference object.
        If unicode evaluates to False it returns None.
        """
        if raw:
            refs = raw.split(':')
            return Reference(*refs)
        else:
            return None

    @staticmethod
    def reference_to_unicode(ref):
        """
        Convert a reference object to unicode.
        If reference is None it returns None.
        """
        if ref:
            return u':'.join(ref)
        else:
            return None

    def get_api_data(self, with_merge_state=True):
        """
        Build the dict representation of the pull request used by the
        JSON API.

        :param with_merge_state: when False, skip the (expensive) merge
            status computation and report it as 'not_available'
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_status = PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status[0],
                'message': safe_unicode(merge_status[1]),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref._asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'state': pull_request.pull_request_state,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    # first status entry of the reviewer, if any was recorded
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data

    def set_state(self, pull_request_state, final_state=None):
        """
        # goes from initial state to updating to initial state.
        # initial state can be changed by specifying back_state=
        with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
            pull_request.merge()

        :param pull_request_state: state to hold while inside the context
        :param final_state: optional state to restore on exit instead of
            the current one

        """

        return _SetState(self, pull_request_state, back_state=final_state)
3873 3913
3874 3914
class PullRequest(Base, _PullRequestBase):
    """
    The pull request entity itself; shares its column definitions with
    ``PullRequestVersion`` via ``_PullRequestBase``.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    statuses = relationship('ChangesetStatus',
                            cascade="all, delete, delete-orphan")
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")
    versions = relationship('PullRequestVersion',
                            cascade="all, delete, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Build a read-only display wrapper around a pull request (or one of
        its versions) so templates can treat both uniformly.

        :param pull_request_obj: PR or PR-version object supplying the data
        :param org_pull_request_obj: the original PR, used for attributes
            that only exist on the live PR (shadow ref, reviewer data)
        :param internal_methods: attribute names resolved on the wrapper
            itself instead of the captured api-data dict
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # NOTE(review): if `item` is listed in self.internal but not
                # actually defined on the class, this getattr recurses —
                # presumably internal only ever names real methods; verify
                # before extending.
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                # delegates to the captured PR object, ordered oldest first
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            @property
            def pull_request_version_id(self):
                # only PR versions carry this attribute; None for a plain PR
                return getattr(pull_request_obj, 'pull_request_version_id', None)

        attrs = StrictAttributeDict(pull_request_obj.get_api_data())

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions

        # these always come from the original PR, not the version
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def __json__(self):
        return {
            'revisions': self.revisions,
        }

    def calculated_review_status(self):
        # aggregate review status across all reviewers
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    @property
    def workspace_id(self):
        # identifier of the shadow-merge workspace for this PR
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        """
        Return the shadow repository instance for this PR, or None when
        the shadow repository directory does not exist on disk.
        """
        workspace_id = self.workspace_id
        vcs_obj = self.target_repo.scm_instance()
        shadow_repository_path = vcs_obj._get_shadow_repository_path(
            self.target_repo.repo_id, workspace_id)
        if os.path.isdir(shadow_repository_path):
            return vcs_obj._get_shadow_instance(shadow_repository_path)
3991 4031
3992 4032
class PullRequestVersion(Base, _PullRequestBase):
    """
    A historical snapshot of a pull request, taken whenever the PR is
    updated. Shares column definitions with ``PullRequest`` via
    ``_PullRequestBase``; status/reviewer queries delegate to the live PR.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        # reviewers live on the original PR, not on the snapshot
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()
4029 4069
4030 4070
class PullRequestReviewers(Base, BaseModel):
    """Association row linking a pull request with one reviewer user."""
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )

    @hybrid_property
    def reasons(self):
        # list of textual reasons why this reviewer was added; empty list
        # when nothing was stored
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        if any(not isinstance(x, compat.string_types) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # backing column for the `reasons` hybrid property (JSON list)
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')
    pull_request = relationship('PullRequest')

    # JSON data of the review rule that added this reviewer, if any
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    def __unicode__(self):
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
4088 4128
4089 4129
class Notification(Base, BaseModel):
    """
    Notification message addressed to one or more users; delivery per user
    is tracked through ``UserNotification`` association rows.
    """
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    # known notification type markers, stored in the `type` column
    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")

    @property
    def recipients(self):
        # all users this notification was sent to, ordered by user id
        return [x.user for x in UserNotification.query()\
            .filter(UserNotification.notification == self)\
            .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and link it to every user in ``recipients``.
        The creator's own copy is immediately marked as read. Objects are
        only added to the session; committing is left to the caller.
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
4148 4188
4149 4189
class UserNotification(Base, BaseModel):
    """Association row linking a ``Notification`` to a recipient user."""
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        """Flag this user's copy as read; caller is expected to commit."""
        self.read = True
        Session().add(self)
4169 4209
4170 4210
class Gist(Base, BaseModel):
    """Code snippet (gist) backed by a small vcs repository on disk."""
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    # gist visibility types
    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    # access-control levels
    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch a gist by its access id or raise ``HTTPNotFound``."""
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        """Fetch a gist by access id, or None when it does not exist."""
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        # full URL of this gist, built by the GistModel
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        # cached lookup of the configured repository root path
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,  # content is intentionally not exposed here
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """
        Get explicit Mercurial repository used
        :param kwargs:
        :return:
        """
        from rhodecode.model.gist import GistModel
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False,
            _vcs_alias=GistModel.vcs_backend)
4273 4313
4274 4314
class ExternalIdentity(Base, BaseModel):
    """
    Mapping between a local user account and an identity at an external
    authentication provider, including tokens issued by that provider.
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        """Load and return the auth plugin registered under ``plugin_id``."""
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin
4340 4380
4341 4381
class Integration(Base, BaseModel):
    """
    Stored configuration of a single integration; scoped either globally,
    to a repository group (child-only or recursive), or to one repository.
    """
    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # see `scope`: with a repo group it limits the scope to direct children,
    # without one it limits a global integration to top-level repos
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
                              default=False)

    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @property
    def scope(self):
        """
        Human-readable scope: the bound repo, the bound repo group
        (child repos only / recursive), 'root_repos', or 'global'.
        """
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4383 4423
4384 4424
class RepoReviewRuleUser(Base, BaseModel):
    """Single-user entry attached to a repository review rule."""
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')

    def rule_data(self):
        # per-reviewer rule payload, same shape as used by review_users
        return {
            'mandatory': self.mandatory
        }
4401 4441
4402 4442
class RepoReviewRuleUserGroup(Base, BaseModel):
    """User-group entry attached to a repository review rule."""
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel vote_rule value: every member of the group must vote
    VOTE_RULE_ALL = -1

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        # per-group rule payload, same shape as used by review_users
        return {
            'mandatory': self.mandatory,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        # human-readable description of the voting requirement
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
4430 4470
4431 4471
class RepoReviewRule(Base, BaseModel):
    """
    Reviewer rule for a repository: selects default reviewers (users and
    user groups) for pull requests whose source/target branch names and
    changed files match the configured patterns. Patterns are globs by
    default; a ``re:`` prefix marks a raw regular expression and ``*``
    matches everything.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', backref='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
    forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')

    def _validate_pattern(self, value):
        # raises re.error when the glob translates to an invalid regex
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_pattern(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'

    @staticmethod
    def _branch_matches(pattern, branch):
        """
        Match one branch name against a rule pattern.

        ``*`` matches everything; ``re:`` prefixes a raw regex; anything
        else is an anchored glob.
        """
        if pattern == '*':
            return True
        if pattern.startswith('re:'):
            regex_source = pattern[3:]
        else:
            regex_source = '^' + glob2re(pattern) + '$'
        return bool(re.compile(regex_source).search(branch))

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        if source_branch or target_branch:
            branch_matches = (
                self._branch_matches(self.source_branch_pattern, source_branch)
                and self._branch_matches(
                    self.target_branch_pattern, target_branch))

        files_matches = True
        if self.file_pattern != '*':
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                # NOTE: file globs are not anchored (matched via search),
                # unlike the branch patterns above
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            for filename in files_changed:
                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches

    @property
    def review_users(self):
        """ Returns the users which this rule applies to """

        users = collections.OrderedDict()

        for rule_user in self.rule_users:
            if rule_user.user.active:
                # fix: compare by username (the dict key); the previous
                # check compared a User object against username keys and
                # therefore never detected duplicates
                if rule_user.user.username not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member as we have him already
                        # this prevents from override the "first" matched
                        # users with duplicates in multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

    def user_group_vote_rule(self, user_id):
        """
        Return the user-group entries of this rule in which the given user
        is a member; used to resolve group voting requirements.
        """
        rules = []
        if not self.rule_user_groups:
            return rules

        for user_group in self.rule_user_groups:
            user_group_members = [x.user_id for x in user_group.users_group.members]
            if user_id in user_group_members:
                rules.append(user_group)
        return rules

    def __repr__(self):
        return '<RepoReviewerRule(id=%r, repo=%r)>' % (
            self.repo_review_rule_id, self.repo)
4593 4633
4594 4634
class ScheduleEntry(Base, BaseModel):
    """
    Definition of a periodically executed task: what to run (dot-notation
    task path plus JSON args/kwargs) and when (crontab, timedelta or
    integer schedule definition).
    """
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )

    # allowed values for the `schedule_type` property
    schedule_types = ['crontab', 'timedelta', 'integer']
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    # backing column for the validated `schedule_type` hybrid property
    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        if val not in self.schedule_types:
            # fix: the message previously had its format arguments swapped,
            # reporting the rejected value as the allowed list and the stale
            # current value as the rejected one
            raise ValueError('Value must be one of `{}` and got `{}`'.format(
                self.schedule_types, val))

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """
        Compute a stable sha1-based uid from the task path, args and
        kwargs; used to detect duplicate schedule entries.
        """
        args = obj.task_args
        kwargs = obj.task_kwargs
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        # NOTE(review): sorted() over the dot-notation string yields its
        # characters sorted; kept as-is because changing it would change
        # every stored task_uid
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return hashlib.sha1(val).hexdigest()

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        """Fetch an entry by its unique name, or None."""
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        """Fetch an entry by primary key, or None."""
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        return self.task_dot_notation

    @property
    def schedule(self):
        # build the runtime schedule object from the stored raw definition
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val):
        # de-coerce mutation-tracked JSON values back to a raw JSON string
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
        if val:
            val = json.dumps(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    @property
    def args_raw(self):
        return self._as_raw(self.task_args)

    @property
    def kwargs_raw(self):
        return self._as_raw(self.task_kwargs)

    def __repr__(self):
        return '<DB:ScheduleEntry({}:{})>'.format(
            self.schedule_entry_id, self.schedule_name)
4713 4753
4714 4754
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    # keep task_uid in sync with task path/args/kwargs on every update
    target.task_uid = ScheduleEntry.get_uid(target)
4718 4758
4719 4759
@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    # compute the task_uid before the row is first inserted
    target.task_uid = ScheduleEntry.get_uid(target)
4723 4763
4724 4764
class _BaseBranchPerms(BaseModel):
    """
    Shared behaviour for branch-permission rules: a glob branch pattern,
    its precomputed hash, and matching of branch names against it.
    """

    @classmethod
    def compute_hash(cls, value):
        """Hash stored alongside the branch pattern for fast lookups."""
        return sha1_safe(value)

    @hybrid_property
    def branch_pattern(self):
        # fall back to the match-everything glob when nothing was stored
        return self._branch_pattern or '*'

    @hybrid_property
    def branch_hash(self):
        return self._branch_hash

    def _validate_glob(self, value):
        # compiling the translated glob raises if the pattern is invalid
        re.compile('^' + glob2re(value) + '$')

    @branch_pattern.setter
    def branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'
        # keep the hash in sync whenever the pattern changes
        self._branch_hash = self.compute_hash(self._branch_pattern)

    def matches(self, branch):
        """
        Check if this the branch matches entry

        :param branch: branch name for the commit
        """
        branch_name = branch or ''
        if not branch_name:
            # no branch name given -> treated as matching
            return True
        pattern = re.compile('^' + glob2re(self.branch_pattern) + '$')
        return bool(pattern.search(branch_name))
4763 4803
4764 4804
class UserToRepoBranchPermission(Base, _BaseBranchPerms):
    """Branch-level permission rule bound to a single user's repo perm."""
    __tablename__ = 'user_to_repo_branch_permissions'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
    user_repo_to_perm = relationship('UserRepoToPerm')

    rule_order = Column('rule_order', Integer(), nullable=False)
    # glob pattern and its hash; both managed by the branch_pattern setter
    # inherited from _BaseBranchPerms
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        return u'<UserBranchPermission(%s => %r)>' % (
            self.user_repo_to_perm, self.branch_pattern)
4790 4830
4791 4831
class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
    """Branch-level permission rule bound to a user group's repo perm."""
    __tablename__ = 'user_group_to_repo_branch_permissions'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_group_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
    user_group_repo_to_perm = relationship('UserGroupRepoToPerm')

    rule_order = Column('rule_order', Integer(), nullable=False)
    # glob pattern and its hash; both managed by the branch_pattern setter
    # inherited from _BaseBranchPerms
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        return u'<UserBranchPermission(%s => %r)>' % (
            self.user_group_repo_to_perm, self.branch_pattern)
4817 4857
4818 4858
4819 4859 class UserBookmark(Base, BaseModel):
4820 4860 __tablename__ = 'user_bookmarks'
4821 4861 __table_args__ = (
4822 4862 UniqueConstraint('user_id', 'bookmark_repo_id'),
4823 4863 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
4824 4864 UniqueConstraint('user_id', 'bookmark_position'),
4825 4865 base_table_args
4826 4866 )
4827 4867
4828 4868 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
4829 4869 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
4830 4870 position = Column("bookmark_position", Integer(), nullable=False)
4831 4871 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
4832 4872 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
4833 4873 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4834 4874
4835 4875 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
4836 4876 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
4837 4877
4838 4878 user = relationship("User")
4839 4879
4840 4880 repository = relationship("Repository")
4841 4881 repository_group = relationship("RepoGroup")
4842 4882
4843 4883 @classmethod
4844 4884 def get_by_position_for_user(cls, position, user_id):
4845 4885 return cls.query() \
4846 4886 .filter(UserBookmark.user_id == user_id) \
4847 4887 .filter(UserBookmark.position == position).scalar()
4848 4888
4849 4889 @classmethod
4850 4890 def get_bookmarks_for_user(cls, user_id):
4851 4891 return cls.query() \
4852 4892 .filter(UserBookmark.user_id == user_id) \
4853 4893 .options(joinedload(UserBookmark.repository)) \
4854 4894 .options(joinedload(UserBookmark.repository_group)) \
4855 4895 .order_by(UserBookmark.position.asc()) \
4856 4896 .all()
4857 4897
4858 4898 def __unicode__(self):
4859 4899 return u'<UserBookmark(%d @ %r)>' % (self.position, self.redirect_url)
4860 4900
4861 4901
4862 4902 class FileStore(Base, BaseModel):
4863 4903 __tablename__ = 'file_store'
4864 4904 __table_args__ = (
4865 4905 base_table_args
4866 4906 )
4867 4907
4868 4908 file_store_id = Column('file_store_id', Integer(), primary_key=True)
4869 4909 file_uid = Column('file_uid', String(1024), nullable=False)
4870 4910 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
4871 4911 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
4872 4912 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
4873 4913
4874 4914 # sha256 hash
4875 4915 file_hash = Column('file_hash', String(512), nullable=False)
4876 4916 file_size = Column('file_size', Integer(), nullable=False)
4877 4917
4878 4918 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4879 4919 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
4880 4920 accessed_count = Column('accessed_count', Integer(), default=0)
4881 4921
4882 4922 enabled = Column('enabled', Boolean(), nullable=False, default=True)
4883 4923
4884 4924 # if repo/repo_group reference is set, check for permissions
4885 4925 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
4886 4926
4887 4927 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
4888 4928 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')
4889 4929
4890 4930 # scope limited to user, which requester have access to
4891 4931 scope_user_id = Column(
4892 4932 'scope_user_id', Integer(), ForeignKey('users.user_id'),
4893 4933 nullable=True, unique=None, default=None)
4894 4934 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')
4895 4935
4896 4936 # scope limited to user group, which requester have access to
4897 4937 scope_user_group_id = Column(
4898 4938 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
4899 4939 nullable=True, unique=None, default=None)
4900 4940 user_group = relationship('UserGroup', lazy='joined')
4901 4941
4902 4942 # scope limited to repo, which requester have access to
4903 4943 scope_repo_id = Column(
4904 4944 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4905 4945 nullable=True, unique=None, default=None)
4906 4946 repo = relationship('Repository', lazy='joined')
4907 4947
4908 4948 # scope limited to repo group, which requester have access to
4909 4949 scope_repo_group_id = Column(
4910 4950 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
4911 4951 nullable=True, unique=None, default=None)
4912 4952 repo_group = relationship('RepoGroup', lazy='joined')
4913 4953
4914 4954 @classmethod
4915 4955 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
4916 4956 file_description='', enabled=True, check_acl=True,
4917 4957 user_id=None, scope_repo_id=None, scope_repo_group_id=None):
4918 4958
4919 4959 store_entry = FileStore()
4920 4960 store_entry.file_uid = file_uid
4921 4961 store_entry.file_display_name = file_display_name
4922 4962 store_entry.file_org_name = filename
4923 4963 store_entry.file_size = file_size
4924 4964 store_entry.file_hash = file_hash
4925 4965 store_entry.file_description = file_description
4926 4966
4927 4967 store_entry.check_acl = check_acl
4928 4968 store_entry.enabled = enabled
4929 4969
4930 4970 store_entry.user_id = user_id
4931 4971 store_entry.scope_repo_id = scope_repo_id
4932 4972 store_entry.scope_repo_group_id = scope_repo_group_id
4933 4973 return store_entry
4934 4974
4935 4975 @classmethod
4936 4976 def bump_access_counter(cls, file_uid, commit=True):
4937 4977 FileStore().query()\
4938 4978 .filter(FileStore.file_uid == file_uid)\
4939 4979 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
4940 4980 FileStore.accessed_on: datetime.datetime.now()})
4941 4981 if commit:
4942 4982 Session().commit()
4943 4983
4944 4984 def __repr__(self):
4945 4985 return '<FileStore({})>'.format(self.file_store_id)
4946 4986
4947 4987
4948 4988 class DbMigrateVersion(Base, BaseModel):
4949 4989 __tablename__ = 'db_migrate_version'
4950 4990 __table_args__ = (
4951 4991 base_table_args,
4952 4992 )
4953 4993
4954 4994 repository_id = Column('repository_id', String(250), primary_key=True)
4955 4995 repository_path = Column('repository_path', Text)
4956 4996 version = Column('version', Integer)
4957 4997
4958 4998 @classmethod
4959 4999 def set_version(cls, version):
4960 5000 """
4961 5001 Helper for forcing a different version, usually for debugging purposes via ishell.
4962 5002 """
4963 5003 ver = DbMigrateVersion.query().first()
4964 5004 ver.version = version
4965 5005 Session().commit()
4966 5006
4967 5007
4968 5008 class DbSession(Base, BaseModel):
4969 5009 __tablename__ = 'db_session'
4970 5010 __table_args__ = (
4971 5011 base_table_args,
4972 5012 )
4973 5013
4974 5014 def __repr__(self):
4975 5015 return '<DB:DbSession({})>'.format(self.id)
4976 5016
4977 5017 id = Column('id', Integer())
4978 5018 namespace = Column('namespace', String(255), primary_key=True)
4979 5019 accessed = Column('accessed', DateTime, nullable=False)
4980 5020 created = Column('created', DateTime, nullable=False)
4981 5021 data = Column('data', PickleType, nullable=False)
@@ -1,1720 +1,1739 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid import compat
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode import events
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 # Data structure to hold the response data when updating commits during a pull
67 67 # request update.
68 68 UpdateResponse = collections.namedtuple('UpdateResponse', [
69 69 'executed', 'reason', 'new', 'old', 'changes',
70 70 'source_changed', 'target_changed'])
71 71
72 72
73 73 class PullRequestModel(BaseModel):
74 74
75 75 cls = PullRequest
76 76
77 77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
78 78
79 79 UPDATE_STATUS_MESSAGES = {
80 80 UpdateFailureReason.NONE: lazy_ugettext(
81 81 'Pull request update successful.'),
82 82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
83 83 'Pull request update failed because of an unknown error.'),
84 84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
85 85 'No update needed because the source and target have not changed.'),
86 86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
87 87 'Pull request cannot be updated because the reference type is '
88 88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
89 89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
90 90 'This pull request cannot be updated because the target '
91 91 'reference is missing.'),
92 92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
93 93 'This pull request cannot be updated because the source '
94 94 'reference is missing.'),
95 95 }
96 96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
97 97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98 98
99 99 def __get_pull_request(self, pull_request):
100 100 return self._get_instance((
101 101 PullRequest, PullRequestVersion), pull_request)
102 102
103 103 def _check_perms(self, perms, pull_request, user, api=False):
104 104 if not api:
105 105 return h.HasRepoPermissionAny(*perms)(
106 106 user=user, repo_name=pull_request.target_repo.repo_name)
107 107 else:
108 108 return h.HasRepoPermissionAnyApi(*perms)(
109 109 user=user, repo_name=pull_request.target_repo.repo_name)
110 110
111 111 def check_user_read(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_merge(self, pull_request, user, api=False):
116 116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 117 return self._check_perms(_perms, pull_request, user, api)
118 118
119 119 def check_user_update(self, pull_request, user, api=False):
120 120 owner = user.user_id == pull_request.user_id
121 121 return self.check_user_merge(pull_request, user, api) or owner
122 122
123 123 def check_user_delete(self, pull_request, user):
124 124 owner = user.user_id == pull_request.user_id
125 125 _perms = ('repository.admin',)
126 126 return self._check_perms(_perms, pull_request, user) or owner
127 127
128 128 def check_user_change_status(self, pull_request, user, api=False):
129 129 reviewer = user.user_id in [x.user_id for x in
130 130 pull_request.reviewers]
131 131 return self.check_user_update(pull_request, user, api) or reviewer
132 132
133 133 def check_user_comment(self, pull_request, user):
134 134 owner = user.user_id == pull_request.user_id
135 135 return self.check_user_read(pull_request, user) or owner
136 136
137 137 def get(self, pull_request):
138 138 return self.__get_pull_request(pull_request)
139 139
140 140 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
141 141 opened_by=None, order_by=None,
142 142 order_dir='desc', only_created=True):
143 143 repo = None
144 144 if repo_name:
145 145 repo = self._get_repo(repo_name)
146 146
147 147 q = PullRequest.query()
148 148
149 149 # source or target
150 150 if repo and source:
151 151 q = q.filter(PullRequest.source_repo == repo)
152 152 elif repo:
153 153 q = q.filter(PullRequest.target_repo == repo)
154 154
155 155 # closed,opened
156 156 if statuses:
157 157 q = q.filter(PullRequest.status.in_(statuses))
158 158
159 159 # opened by filter
160 160 if opened_by:
161 161 q = q.filter(PullRequest.user_id.in_(opened_by))
162 162
163 163 # only get those that are in "created" state
164 164 if only_created:
165 165 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
166 166
167 167 if order_by:
168 168 order_map = {
169 169 'name_raw': PullRequest.pull_request_id,
170 170 'id': PullRequest.pull_request_id,
171 171 'title': PullRequest.title,
172 172 'updated_on_raw': PullRequest.updated_on,
173 173 'target_repo': PullRequest.target_repo_id
174 174 }
175 175 if order_dir == 'asc':
176 176 q = q.order_by(order_map[order_by].asc())
177 177 else:
178 178 q = q.order_by(order_map[order_by].desc())
179 179
180 180 return q
181 181
182 182 def count_all(self, repo_name, source=False, statuses=None,
183 183 opened_by=None):
184 184 """
185 185 Count the number of pull requests for a specific repository.
186 186
187 187 :param repo_name: target or source repo
188 188 :param source: boolean flag to specify if repo_name refers to source
189 189 :param statuses: list of pull request statuses
190 190 :param opened_by: author user of the pull request
191 191 :returns: int number of pull requests
192 192 """
193 193 q = self._prepare_get_all_query(
194 194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
195 195
196 196 return q.count()
197 197
198 198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
199 199 offset=0, length=None, order_by=None, order_dir='desc'):
200 200 """
201 201 Get all pull requests for a specific repository.
202 202
203 203 :param repo_name: target or source repo
204 204 :param source: boolean flag to specify if repo_name refers to source
205 205 :param statuses: list of pull request statuses
206 206 :param opened_by: author user of the pull request
207 207 :param offset: pagination offset
208 208 :param length: length of returned list
209 209 :param order_by: order of the returned list
210 210 :param order_dir: 'asc' or 'desc' ordering direction
211 211 :returns: list of pull requests
212 212 """
213 213 q = self._prepare_get_all_query(
214 214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
215 215 order_by=order_by, order_dir=order_dir)
216 216
217 217 if length:
218 218 pull_requests = q.limit(length).offset(offset).all()
219 219 else:
220 220 pull_requests = q.all()
221 221
222 222 return pull_requests
223 223
224 224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
225 225 opened_by=None):
226 226 """
227 227 Count the number of pull requests for a specific repository that are
228 228 awaiting review.
229 229
230 230 :param repo_name: target or source repo
231 231 :param source: boolean flag to specify if repo_name refers to source
232 232 :param statuses: list of pull request statuses
233 233 :param opened_by: author user of the pull request
234 234 :returns: int number of pull requests
235 235 """
236 236 pull_requests = self.get_awaiting_review(
237 237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
238 238
239 239 return len(pull_requests)
240 240
241 241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
242 242 opened_by=None, offset=0, length=None,
243 243 order_by=None, order_dir='desc'):
244 244 """
245 245 Get all pull requests for a specific repository that are awaiting
246 246 review.
247 247
248 248 :param repo_name: target or source repo
249 249 :param source: boolean flag to specify if repo_name refers to source
250 250 :param statuses: list of pull request statuses
251 251 :param opened_by: author user of the pull request
252 252 :param offset: pagination offset
253 253 :param length: length of returned list
254 254 :param order_by: order of the returned list
255 255 :param order_dir: 'asc' or 'desc' ordering direction
256 256 :returns: list of pull requests
257 257 """
258 258 pull_requests = self.get_all(
259 259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
260 260 order_by=order_by, order_dir=order_dir)
261 261
262 262 _filtered_pull_requests = []
263 263 for pr in pull_requests:
264 264 status = pr.calculated_review_status()
265 265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
266 266 ChangesetStatus.STATUS_UNDER_REVIEW]:
267 267 _filtered_pull_requests.append(pr)
268 268 if length:
269 269 return _filtered_pull_requests[offset:offset+length]
270 270 else:
271 271 return _filtered_pull_requests
272 272
273 273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
274 274 opened_by=None, user_id=None):
275 275 """
276 276 Count the number of pull requests for a specific repository that are
277 277 awaiting review from a specific user.
278 278
279 279 :param repo_name: target or source repo
280 280 :param source: boolean flag to specify if repo_name refers to source
281 281 :param statuses: list of pull request statuses
282 282 :param opened_by: author user of the pull request
283 283 :param user_id: reviewer user of the pull request
284 284 :returns: int number of pull requests
285 285 """
286 286 pull_requests = self.get_awaiting_my_review(
287 287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 288 user_id=user_id)
289 289
290 290 return len(pull_requests)
291 291
292 292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
293 293 opened_by=None, user_id=None, offset=0,
294 294 length=None, order_by=None, order_dir='desc'):
295 295 """
296 296 Get all pull requests for a specific repository that are awaiting
297 297 review from a specific user.
298 298
299 299 :param repo_name: target or source repo
300 300 :param source: boolean flag to specify if repo_name refers to source
301 301 :param statuses: list of pull request statuses
302 302 :param opened_by: author user of the pull request
303 303 :param user_id: reviewer user of the pull request
304 304 :param offset: pagination offset
305 305 :param length: length of returned list
306 306 :param order_by: order of the returned list
307 307 :param order_dir: 'asc' or 'desc' ordering direction
308 308 :returns: list of pull requests
309 309 """
310 310 pull_requests = self.get_all(
311 311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 312 order_by=order_by, order_dir=order_dir)
313 313
314 314 _my = PullRequestModel().get_not_reviewed(user_id)
315 315 my_participation = []
316 316 for pr in pull_requests:
317 317 if pr in _my:
318 318 my_participation.append(pr)
319 319 _filtered_pull_requests = my_participation
320 320 if length:
321 321 return _filtered_pull_requests[offset:offset+length]
322 322 else:
323 323 return _filtered_pull_requests
324 324
325 325 def get_not_reviewed(self, user_id):
326 326 return [
327 327 x.pull_request for x in PullRequestReviewers.query().filter(
328 328 PullRequestReviewers.user_id == user_id).all()
329 329 ]
330 330
331 331 def _prepare_participating_query(self, user_id=None, statuses=None,
332 332 order_by=None, order_dir='desc'):
333 333 q = PullRequest.query()
334 334 if user_id:
335 335 reviewers_subquery = Session().query(
336 336 PullRequestReviewers.pull_request_id).filter(
337 337 PullRequestReviewers.user_id == user_id).subquery()
338 338 user_filter = or_(
339 339 PullRequest.user_id == user_id,
340 340 PullRequest.pull_request_id.in_(reviewers_subquery)
341 341 )
342 342 q = PullRequest.query().filter(user_filter)
343 343
344 344 # closed,opened
345 345 if statuses:
346 346 q = q.filter(PullRequest.status.in_(statuses))
347 347
348 348 if order_by:
349 349 order_map = {
350 350 'name_raw': PullRequest.pull_request_id,
351 351 'title': PullRequest.title,
352 352 'updated_on_raw': PullRequest.updated_on,
353 353 'target_repo': PullRequest.target_repo_id
354 354 }
355 355 if order_dir == 'asc':
356 356 q = q.order_by(order_map[order_by].asc())
357 357 else:
358 358 q = q.order_by(order_map[order_by].desc())
359 359
360 360 return q
361 361
362 362 def count_im_participating_in(self, user_id=None, statuses=None):
363 363 q = self._prepare_participating_query(user_id, statuses=statuses)
364 364 return q.count()
365 365
366 366 def get_im_participating_in(
367 367 self, user_id=None, statuses=None, offset=0,
368 368 length=None, order_by=None, order_dir='desc'):
369 369 """
370 370 Get all Pull requests that i'm participating in, or i have opened
371 371 """
372 372
373 373 q = self._prepare_participating_query(
374 374 user_id, statuses=statuses, order_by=order_by,
375 375 order_dir=order_dir)
376 376
377 377 if length:
378 378 pull_requests = q.limit(length).offset(offset).all()
379 379 else:
380 380 pull_requests = q.all()
381 381
382 382 return pull_requests
383 383
384 384 def get_versions(self, pull_request):
385 385 """
386 386 returns version of pull request sorted by ID descending
387 387 """
388 388 return PullRequestVersion.query()\
389 389 .filter(PullRequestVersion.pull_request == pull_request)\
390 390 .order_by(PullRequestVersion.pull_request_version_id.asc())\
391 391 .all()
392 392
393 393 def get_pr_version(self, pull_request_id, version=None):
394 394 at_version = None
395 395
396 396 if version and version == 'latest':
397 397 pull_request_ver = PullRequest.get(pull_request_id)
398 398 pull_request_obj = pull_request_ver
399 399 _org_pull_request_obj = pull_request_obj
400 400 at_version = 'latest'
401 401 elif version:
402 402 pull_request_ver = PullRequestVersion.get_or_404(version)
403 403 pull_request_obj = pull_request_ver
404 404 _org_pull_request_obj = pull_request_ver.pull_request
405 405 at_version = pull_request_ver.pull_request_version_id
406 406 else:
407 407 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
408 408 pull_request_id)
409 409
410 410 pull_request_display_obj = PullRequest.get_pr_display_object(
411 411 pull_request_obj, _org_pull_request_obj)
412 412
413 413 return _org_pull_request_obj, pull_request_obj, \
414 414 pull_request_display_obj, at_version
415 415
416 416 def create(self, created_by, source_repo, source_ref, target_repo,
417 417 target_ref, revisions, reviewers, title, description=None,
418 418 description_renderer=None,
419 419 reviewer_data=None, translator=None, auth_user=None):
420 420 translator = translator or get_current_request().translate
421 421
422 422 created_by_user = self._get_user(created_by)
423 423 auth_user = auth_user or created_by_user.AuthUser()
424 424 source_repo = self._get_repo(source_repo)
425 425 target_repo = self._get_repo(target_repo)
426 426
427 427 pull_request = PullRequest()
428 428 pull_request.source_repo = source_repo
429 429 pull_request.source_ref = source_ref
430 430 pull_request.target_repo = target_repo
431 431 pull_request.target_ref = target_ref
432 432 pull_request.revisions = revisions
433 433 pull_request.title = title
434 434 pull_request.description = description
435 435 pull_request.description_renderer = description_renderer
436 436 pull_request.author = created_by_user
437 437 pull_request.reviewer_data = reviewer_data
438 438 pull_request.pull_request_state = pull_request.STATE_CREATING
439 439 Session().add(pull_request)
440 440 Session().flush()
441 441
442 442 reviewer_ids = set()
443 443 # members / reviewers
444 444 for reviewer_object in reviewers:
445 445 user_id, reasons, mandatory, rules = reviewer_object
446 446 user = self._get_user(user_id)
447 447
448 448 # skip duplicates
449 449 if user.user_id in reviewer_ids:
450 450 continue
451 451
452 452 reviewer_ids.add(user.user_id)
453 453
454 454 reviewer = PullRequestReviewers()
455 455 reviewer.user = user
456 456 reviewer.pull_request = pull_request
457 457 reviewer.reasons = reasons
458 458 reviewer.mandatory = mandatory
459 459
460 460 # NOTE(marcink): pick only first rule for now
461 461 rule_id = list(rules)[0] if rules else None
462 462 rule = RepoReviewRule.get(rule_id) if rule_id else None
463 463 if rule:
464 464 review_group = rule.user_group_vote_rule(user_id)
465 465 # we check if this particular reviewer is member of a voting group
466 466 if review_group:
467 467 # NOTE(marcink):
468 468 # can be that user is member of more but we pick the first same,
469 469 # same as default reviewers algo
470 470 review_group = review_group[0]
471 471
472 472 rule_data = {
473 473 'rule_name':
474 474 rule.review_rule_name,
475 475 'rule_user_group_entry_id':
476 476 review_group.repo_review_rule_users_group_id,
477 477 'rule_user_group_name':
478 478 review_group.users_group.users_group_name,
479 479 'rule_user_group_members':
480 480 [x.user.username for x in review_group.users_group.members],
481 481 'rule_user_group_members_id':
482 482 [x.user.user_id for x in review_group.users_group.members],
483 483 }
484 484 # e.g {'vote_rule': -1, 'mandatory': True}
485 485 rule_data.update(review_group.rule_data())
486 486
487 487 reviewer.rule_data = rule_data
488 488
489 489 Session().add(reviewer)
490 490 Session().flush()
491 491
492 492 # Set approval status to "Under Review" for all commits which are
493 493 # part of this pull request.
494 494 ChangesetStatusModel().set_status(
495 495 repo=target_repo,
496 496 status=ChangesetStatus.STATUS_UNDER_REVIEW,
497 497 user=created_by_user,
498 498 pull_request=pull_request
499 499 )
500 500 # we commit early at this point. This has to do with a fact
501 501 # that before queries do some row-locking. And because of that
502 502 # we need to commit and finish transaction before below validate call
503 503 # that for large repos could be long resulting in long row locks
504 504 Session().commit()
505 505
506 506 # prepare workspace, and run initial merge simulation. Set state during that
507 507 # operation
508 508 pull_request = PullRequest.get(pull_request.pull_request_id)
509 509
510 510 # set as merging, for simulation, and if finished to created so we mark
511 511 # simulation is working fine
512 512 with pull_request.set_state(PullRequest.STATE_MERGING,
513 513 final_state=PullRequest.STATE_CREATED):
514 514 MergeCheck.validate(
515 515 pull_request, auth_user=auth_user, translator=translator)
516 516
517 517 self.notify_reviewers(pull_request, reviewer_ids)
518 518 self.trigger_pull_request_hook(
519 519 pull_request, created_by_user, 'create')
520 520
521 521 creation_data = pull_request.get_api_data(with_merge_state=False)
522 522 self._log_audit_action(
523 523 'repo.pull_request.create', {'data': creation_data},
524 524 auth_user, pull_request)
525 525
526 526 return pull_request
527 527
528 528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
529 529 pull_request = self.__get_pull_request(pull_request)
530 530 target_scm = pull_request.target_repo.scm_instance()
531 531 if action == 'create':
532 532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
533 533 elif action == 'merge':
534 534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
535 535 elif action == 'close':
536 536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
537 537 elif action == 'review_status_change':
538 538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
539 539 elif action == 'update':
540 540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
541 541 elif action == 'comment':
542 542 # dummy hook ! for comment. We want this function to handle all cases
543 543 def trigger_hook(*args, **kwargs):
544 544 pass
545 545 comment = data['comment']
546 546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
547 547 else:
548 548 return
549 549
550 550 trigger_hook(
551 551 username=user.username,
552 552 repo_name=pull_request.target_repo.repo_name,
553 553 repo_alias=target_scm.alias,
554 554 pull_request=pull_request,
555 555 data=data)
556 556
557 557 def _get_commit_ids(self, pull_request):
558 558 """
559 559 Return the commit ids of the merged pull request.
560 560
561 561 This method is not dealing correctly yet with the lack of autoupdates
562 562 nor with the implicit target updates.
563 563 For example: if a commit in the source repo is already in the target it
564 564 will be reported anyways.
565 565 """
566 566 merge_rev = pull_request.merge_rev
567 567 if merge_rev is None:
568 568 raise ValueError('This pull request was not merged yet')
569 569
570 570 commit_ids = list(pull_request.revisions)
571 571 if merge_rev not in commit_ids:
572 572 commit_ids.append(merge_rev)
573 573
574 574 return commit_ids
575 575
576 576 def merge_repo(self, pull_request, user, extras):
577 577 log.debug("Merging pull request %s", pull_request.pull_request_id)
578 578 extras['user_agent'] = 'internal-merge'
579 579 merge_state = self._merge_pull_request(pull_request, user, extras)
580 580 if merge_state.executed:
581 581 log.debug("Merge was successful, updating the pull request comments.")
582 582 self._comment_and_close_pr(pull_request, user, merge_state)
583 583
584 584 self._log_audit_action(
585 585 'repo.pull_request.merge',
586 586 {'merge_state': merge_state.__dict__},
587 587 user, pull_request)
588 588
589 589 else:
590 590 log.warn("Merge failed, not updating the pull request.")
591 591 return merge_state
592 592
593 593 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
594 594 target_vcs = pull_request.target_repo.scm_instance()
595 595 source_vcs = pull_request.source_repo.scm_instance()
596 596
597 597 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
598 598 pr_id=pull_request.pull_request_id,
599 599 pr_title=pull_request.title,
600 600 source_repo=source_vcs.name,
601 601 source_ref_name=pull_request.source_ref_parts.name,
602 602 target_repo=target_vcs.name,
603 603 target_ref_name=pull_request.target_ref_parts.name,
604 604 )
605 605
606 606 workspace_id = self._workspace_id(pull_request)
607 607 repo_id = pull_request.target_repo.repo_id
608 608 use_rebase = self._use_rebase_for_merging(pull_request)
609 609 close_branch = self._close_branch_before_merging(pull_request)
610 610
611 611 target_ref = self._refresh_reference(
612 612 pull_request.target_ref_parts, target_vcs)
613 613
614 614 callback_daemon, extras = prepare_callback_daemon(
615 615 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
616 616 host=vcs_settings.HOOKS_HOST,
617 617 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
618 618
619 619 with callback_daemon:
620 620 # TODO: johbo: Implement a clean way to run a config_override
621 621 # for a single call.
622 622 target_vcs.config.set(
623 623 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
624 624
625 625 user_name = user.short_contact
626 626 merge_state = target_vcs.merge(
627 627 repo_id, workspace_id, target_ref, source_vcs,
628 628 pull_request.source_ref_parts,
629 629 user_name=user_name, user_email=user.email,
630 630 message=message, use_rebase=use_rebase,
631 631 close_branch=close_branch)
632 632 return merge_state
633 633
634 634 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
635 635 pull_request.merge_rev = merge_state.merge_ref.commit_id
636 636 pull_request.updated_on = datetime.datetime.now()
637 637 close_msg = close_msg or 'Pull request merged and closed'
638 638
639 639 CommentsModel().create(
640 640 text=safe_unicode(close_msg),
641 641 repo=pull_request.target_repo.repo_id,
642 642 user=user.user_id,
643 643 pull_request=pull_request.pull_request_id,
644 644 f_path=None,
645 645 line_no=None,
646 646 closing_pr=True
647 647 )
648 648
649 649 Session().add(pull_request)
650 650 Session().flush()
651 651 # TODO: paris: replace invalidation with less radical solution
652 652 ScmModel().mark_for_invalidation(
653 653 pull_request.target_repo.repo_name)
654 654 self.trigger_pull_request_hook(pull_request, user, 'merge')
655 655
656 656 def has_valid_update_type(self, pull_request):
657 657 source_ref_type = pull_request.source_ref_parts.type
658 658 return source_ref_type in self.REF_TYPES
659 659
660 660 def update_commits(self, pull_request):
661 661 """
662 662 Get the updated list of commits for the pull request
663 663 and return the new pull request version and the list
664 664 of commits processed by this update action
665 665 """
666 666 pull_request = self.__get_pull_request(pull_request)
667 667 source_ref_type = pull_request.source_ref_parts.type
668 668 source_ref_name = pull_request.source_ref_parts.name
669 669 source_ref_id = pull_request.source_ref_parts.commit_id
670 670
671 671 target_ref_type = pull_request.target_ref_parts.type
672 672 target_ref_name = pull_request.target_ref_parts.name
673 673 target_ref_id = pull_request.target_ref_parts.commit_id
674 674
675 675 if not self.has_valid_update_type(pull_request):
676 676 log.debug("Skipping update of pull request %s due to ref type: %s",
677 677 pull_request, source_ref_type)
678 678 return UpdateResponse(
679 679 executed=False,
680 680 reason=UpdateFailureReason.WRONG_REF_TYPE,
681 681 old=pull_request, new=None, changes=None,
682 682 source_changed=False, target_changed=False)
683 683
684 684 # source repo
685 685 source_repo = pull_request.source_repo.scm_instance()
686 686 try:
687 687 source_commit = source_repo.get_commit(commit_id=source_ref_name)
688 688 except CommitDoesNotExistError:
689 689 return UpdateResponse(
690 690 executed=False,
691 691 reason=UpdateFailureReason.MISSING_SOURCE_REF,
692 692 old=pull_request, new=None, changes=None,
693 693 source_changed=False, target_changed=False)
694 694
695 695 source_changed = source_ref_id != source_commit.raw_id
696 696
697 697 # target repo
698 698 target_repo = pull_request.target_repo.scm_instance()
699 699 try:
700 700 target_commit = target_repo.get_commit(commit_id=target_ref_name)
701 701 except CommitDoesNotExistError:
702 702 return UpdateResponse(
703 703 executed=False,
704 704 reason=UpdateFailureReason.MISSING_TARGET_REF,
705 705 old=pull_request, new=None, changes=None,
706 706 source_changed=False, target_changed=False)
707 707 target_changed = target_ref_id != target_commit.raw_id
708 708
709 709 if not (source_changed or target_changed):
710 710 log.debug("Nothing changed in pull request %s", pull_request)
711 711 return UpdateResponse(
712 712 executed=False,
713 713 reason=UpdateFailureReason.NO_CHANGE,
714 714 old=pull_request, new=None, changes=None,
715 715 source_changed=target_changed, target_changed=source_changed)
716 716
717 717 change_in_found = 'target repo' if target_changed else 'source repo'
718 718 log.debug('Updating pull request because of change in %s detected',
719 719 change_in_found)
720 720
721 721 # Finally there is a need for an update, in case of source change
722 722 # we create a new version, else just an update
723 723 if source_changed:
724 724 pull_request_version = self._create_version_from_snapshot(pull_request)
725 725 self._link_comments_to_version(pull_request_version)
726 726 else:
727 727 try:
728 728 ver = pull_request.versions[-1]
729 729 except IndexError:
730 730 ver = None
731 731
732 732 pull_request.pull_request_version_id = \
733 733 ver.pull_request_version_id if ver else None
734 734 pull_request_version = pull_request
735 735
736 736 try:
737 737 if target_ref_type in self.REF_TYPES:
738 738 target_commit = target_repo.get_commit(target_ref_name)
739 739 else:
740 740 target_commit = target_repo.get_commit(target_ref_id)
741 741 except CommitDoesNotExistError:
742 742 return UpdateResponse(
743 743 executed=False,
744 744 reason=UpdateFailureReason.MISSING_TARGET_REF,
745 745 old=pull_request, new=None, changes=None,
746 746 source_changed=source_changed, target_changed=target_changed)
747 747
748 748 # re-compute commit ids
749 749 old_commit_ids = pull_request.revisions
750 750 pre_load = ["author", "branch", "date", "message"]
751 751 commit_ranges = target_repo.compare(
752 752 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
753 753 pre_load=pre_load)
754 754
755 755 ancestor = target_repo.get_common_ancestor(
756 756 target_commit.raw_id, source_commit.raw_id, source_repo)
757 757
758 758 pull_request.source_ref = '%s:%s:%s' % (
759 759 source_ref_type, source_ref_name, source_commit.raw_id)
760 760 pull_request.target_ref = '%s:%s:%s' % (
761 761 target_ref_type, target_ref_name, ancestor)
762 762
763 763 pull_request.revisions = [
764 764 commit.raw_id for commit in reversed(commit_ranges)]
765 765 pull_request.updated_on = datetime.datetime.now()
766 766 Session().add(pull_request)
767 767 new_commit_ids = pull_request.revisions
768 768
769 769 old_diff_data, new_diff_data = self._generate_update_diffs(
770 770 pull_request, pull_request_version)
771 771
772 772 # calculate commit and file changes
773 773 changes = self._calculate_commit_id_changes(
774 774 old_commit_ids, new_commit_ids)
775 775 file_changes = self._calculate_file_changes(
776 776 old_diff_data, new_diff_data)
777 777
778 778 # set comments as outdated if DIFFS changed
779 779 CommentsModel().outdate_comments(
780 780 pull_request, old_diff_data=old_diff_data,
781 781 new_diff_data=new_diff_data)
782 782
783 783 commit_changes = (changes.added or changes.removed)
784 784 file_node_changes = (
785 785 file_changes.added or file_changes.modified or file_changes.removed)
786 786 pr_has_changes = commit_changes or file_node_changes
787 787
788 788 # Add an automatic comment to the pull request, in case
789 789 # anything has changed
790 790 if pr_has_changes:
791 791 update_comment = CommentsModel().create(
792 792 text=self._render_update_message(changes, file_changes),
793 793 repo=pull_request.target_repo,
794 794 user=pull_request.author,
795 795 pull_request=pull_request,
796 796 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
797 797
798 798 # Update status to "Under Review" for added commits
799 799 for commit_id in changes.added:
800 800 ChangesetStatusModel().set_status(
801 801 repo=pull_request.source_repo,
802 802 status=ChangesetStatus.STATUS_UNDER_REVIEW,
803 803 comment=update_comment,
804 804 user=pull_request.author,
805 805 pull_request=pull_request,
806 806 revision=commit_id)
807 807
808 808 log.debug(
809 809 'Updated pull request %s, added_ids: %s, common_ids: %s, '
810 810 'removed_ids: %s', pull_request.pull_request_id,
811 811 changes.added, changes.common, changes.removed)
812 812 log.debug(
813 813 'Updated pull request with the following file changes: %s',
814 814 file_changes)
815 815
816 816 log.info(
817 817 "Updated pull request %s from commit %s to commit %s, "
818 818 "stored new version %s of this pull request.",
819 819 pull_request.pull_request_id, source_ref_id,
820 820 pull_request.source_ref_parts.commit_id,
821 821 pull_request_version.pull_request_version_id)
822 822 Session().commit()
823 823 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
824 824
825 825 return UpdateResponse(
826 826 executed=True, reason=UpdateFailureReason.NONE,
827 827 old=pull_request, new=pull_request_version, changes=changes,
828 828 source_changed=source_changed, target_changed=target_changed)
829 829
830 830 def _create_version_from_snapshot(self, pull_request):
831 831 version = PullRequestVersion()
832 832 version.title = pull_request.title
833 833 version.description = pull_request.description
834 834 version.status = pull_request.status
835 835 version.pull_request_state = pull_request.pull_request_state
836 836 version.created_on = datetime.datetime.now()
837 837 version.updated_on = pull_request.updated_on
838 838 version.user_id = pull_request.user_id
839 839 version.source_repo = pull_request.source_repo
840 840 version.source_ref = pull_request.source_ref
841 841 version.target_repo = pull_request.target_repo
842 842 version.target_ref = pull_request.target_ref
843 843
844 844 version._last_merge_source_rev = pull_request._last_merge_source_rev
845 845 version._last_merge_target_rev = pull_request._last_merge_target_rev
846 846 version.last_merge_status = pull_request.last_merge_status
847 847 version.shadow_merge_ref = pull_request.shadow_merge_ref
848 848 version.merge_rev = pull_request.merge_rev
849 849 version.reviewer_data = pull_request.reviewer_data
850 850
851 851 version.revisions = pull_request.revisions
852 852 version.pull_request = pull_request
853 853 Session().add(version)
854 854 Session().flush()
855 855
856 856 return version
857 857
858 858 def _generate_update_diffs(self, pull_request, pull_request_version):
859 859
860 860 diff_context = (
861 861 self.DIFF_CONTEXT +
862 862 CommentsModel.needed_extra_diff_context())
863 863 hide_whitespace_changes = False
864 864 source_repo = pull_request_version.source_repo
865 865 source_ref_id = pull_request_version.source_ref_parts.commit_id
866 866 target_ref_id = pull_request_version.target_ref_parts.commit_id
867 867 old_diff = self._get_diff_from_pr_or_version(
868 868 source_repo, source_ref_id, target_ref_id,
869 869 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
870 870
871 871 source_repo = pull_request.source_repo
872 872 source_ref_id = pull_request.source_ref_parts.commit_id
873 873 target_ref_id = pull_request.target_ref_parts.commit_id
874 874
875 875 new_diff = self._get_diff_from_pr_or_version(
876 876 source_repo, source_ref_id, target_ref_id,
877 877 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
878 878
879 879 old_diff_data = diffs.DiffProcessor(old_diff)
880 880 old_diff_data.prepare()
881 881 new_diff_data = diffs.DiffProcessor(new_diff)
882 882 new_diff_data.prepare()
883 883
884 884 return old_diff_data, new_diff_data
885 885
886 886 def _link_comments_to_version(self, pull_request_version):
887 887 """
888 888 Link all unlinked comments of this pull request to the given version.
889 889
890 890 :param pull_request_version: The `PullRequestVersion` to which
891 891 the comments shall be linked.
892 892
893 893 """
894 894 pull_request = pull_request_version.pull_request
895 895 comments = ChangesetComment.query()\
896 896 .filter(
897 897 # TODO: johbo: Should we query for the repo at all here?
898 898 # Pending decision on how comments of PRs are to be related
899 899 # to either the source repo, the target repo or no repo at all.
900 900 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
901 901 ChangesetComment.pull_request == pull_request,
902 902 ChangesetComment.pull_request_version == None)\
903 903 .order_by(ChangesetComment.comment_id.asc())
904 904
905 905 # TODO: johbo: Find out why this breaks if it is done in a bulk
906 906 # operation.
907 907 for comment in comments:
908 908 comment.pull_request_version_id = (
909 909 pull_request_version.pull_request_version_id)
910 910 Session().add(comment)
911 911
912 912 def _calculate_commit_id_changes(self, old_ids, new_ids):
913 913 added = [x for x in new_ids if x not in old_ids]
914 914 common = [x for x in new_ids if x in old_ids]
915 915 removed = [x for x in old_ids if x not in new_ids]
916 916 total = new_ids
917 917 return ChangeTuple(added, common, removed, total)
918 918
919 919 def _calculate_file_changes(self, old_diff_data, new_diff_data):
920 920
921 921 old_files = OrderedDict()
922 922 for diff_data in old_diff_data.parsed_diff:
923 923 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
924 924
925 925 added_files = []
926 926 modified_files = []
927 927 removed_files = []
928 928 for diff_data in new_diff_data.parsed_diff:
929 929 new_filename = diff_data['filename']
930 930 new_hash = md5_safe(diff_data['raw_diff'])
931 931
932 932 old_hash = old_files.get(new_filename)
933 933 if not old_hash:
934 934 # file is not present in old diff, means it's added
935 935 added_files.append(new_filename)
936 936 else:
937 937 if new_hash != old_hash:
938 938 modified_files.append(new_filename)
939 939 # now remove a file from old, since we have seen it already
940 940 del old_files[new_filename]
941 941
942 942 # removed files is when there are present in old, but not in NEW,
943 943 # since we remove old files that are present in new diff, left-overs
944 944 # if any should be the removed files
945 945 removed_files.extend(old_files.keys())
946 946
947 947 return FileChangeTuple(added_files, modified_files, removed_files)
948 948
949 949 def _render_update_message(self, changes, file_changes):
950 950 """
951 951 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
952 952 so it's always looking the same disregarding on which default
953 953 renderer system is using.
954 954
955 955 :param changes: changes named tuple
956 956 :param file_changes: file changes named tuple
957 957
958 958 """
959 959 new_status = ChangesetStatus.get_status_lbl(
960 960 ChangesetStatus.STATUS_UNDER_REVIEW)
961 961
962 962 changed_files = (
963 963 file_changes.added + file_changes.modified + file_changes.removed)
964 964
965 965 params = {
966 966 'under_review_label': new_status,
967 967 'added_commits': changes.added,
968 968 'removed_commits': changes.removed,
969 969 'changed_files': changed_files,
970 970 'added_files': file_changes.added,
971 971 'modified_files': file_changes.modified,
972 972 'removed_files': file_changes.removed,
973 973 }
974 974 renderer = RstTemplateRenderer()
975 975 return renderer.render('pull_request_update.mako', **params)
976 976
977 977 def edit(self, pull_request, title, description, description_renderer, user):
978 978 pull_request = self.__get_pull_request(pull_request)
979 979 old_data = pull_request.get_api_data(with_merge_state=False)
980 980 if pull_request.is_closed():
981 981 raise ValueError('This pull request is closed')
982 982 if title:
983 983 pull_request.title = title
984 984 pull_request.description = description
985 985 pull_request.updated_on = datetime.datetime.now()
986 986 pull_request.description_renderer = description_renderer
987 987 Session().add(pull_request)
988 988 self._log_audit_action(
989 989 'repo.pull_request.edit', {'old_data': old_data},
990 990 user, pull_request)
991 991
992 992 def update_reviewers(self, pull_request, reviewer_data, user):
993 993 """
994 994 Update the reviewers in the pull request
995 995
996 996 :param pull_request: the pr to update
997 997 :param reviewer_data: list of tuples
998 998 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
999 999 """
1000 1000 pull_request = self.__get_pull_request(pull_request)
1001 1001 if pull_request.is_closed():
1002 1002 raise ValueError('This pull request is closed')
1003 1003
1004 1004 reviewers = {}
1005 1005 for user_id, reasons, mandatory, rules in reviewer_data:
1006 1006 if isinstance(user_id, (int, compat.string_types)):
1007 1007 user_id = self._get_user(user_id).user_id
1008 1008 reviewers[user_id] = {
1009 1009 'reasons': reasons, 'mandatory': mandatory}
1010 1010
1011 1011 reviewers_ids = set(reviewers.keys())
1012 1012 current_reviewers = PullRequestReviewers.query()\
1013 1013 .filter(PullRequestReviewers.pull_request ==
1014 1014 pull_request).all()
1015 1015 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1016 1016
1017 1017 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1018 1018 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1019 1019
1020 1020 log.debug("Adding %s reviewers", ids_to_add)
1021 1021 log.debug("Removing %s reviewers", ids_to_remove)
1022 1022 changed = False
1023 added_audit_reviewers = []
1024 removed_audit_reviewers = []
1025
1023 1026 for uid in ids_to_add:
1024 1027 changed = True
1025 1028 _usr = self._get_user(uid)
1026 1029 reviewer = PullRequestReviewers()
1027 1030 reviewer.user = _usr
1028 1031 reviewer.pull_request = pull_request
1029 1032 reviewer.reasons = reviewers[uid]['reasons']
1030 1033 # NOTE(marcink): mandatory shouldn't be changed now
1031 1034 # reviewer.mandatory = reviewers[uid]['reasons']
1032 1035 Session().add(reviewer)
1033 self._log_audit_action(
1034 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1035 user, pull_request)
1036 added_audit_reviewers.append(reviewer.get_dict())
1036 1037
1037 1038 for uid in ids_to_remove:
1038 1039 changed = True
1040 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1041 # that prevents and fixes cases that we added the same reviewer twice.
1042 # this CAN happen due to the lack of DB checks
1039 1043 reviewers = PullRequestReviewers.query()\
1040 1044 .filter(PullRequestReviewers.user_id == uid,
1041 1045 PullRequestReviewers.pull_request == pull_request)\
1042 1046 .all()
1043 # use .all() in case we accidentally added the same person twice
1044 # this CAN happen due to the lack of DB checks
1047
1045 1048 for obj in reviewers:
1046 old_data = obj.get_dict()
1049 added_audit_reviewers.append(obj.get_dict())
1047 1050 Session().delete(obj)
1048 self._log_audit_action(
1049 'repo.pull_request.reviewer.delete',
1050 {'old_data': old_data}, user, pull_request)
1051 1051
1052 1052 if changed:
1053 Session().expire_all()
1053 1054 pull_request.updated_on = datetime.datetime.now()
1054 1055 Session().add(pull_request)
1055 1056
1057 # finally store audit logs
1058 for user_data in added_audit_reviewers:
1059 self._log_audit_action(
1060 'repo.pull_request.reviewer.add', {'data': user_data},
1061 user, pull_request)
1062 for user_data in removed_audit_reviewers:
1063 self._log_audit_action(
1064 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1065 user, pull_request)
1066
1056 1067 self.notify_reviewers(pull_request, ids_to_add)
1057 1068 return ids_to_add, ids_to_remove
1058 1069
1059 1070 def get_url(self, pull_request, request=None, permalink=False):
1060 1071 if not request:
1061 1072 request = get_current_request()
1062 1073
1063 1074 if permalink:
1064 1075 return request.route_url(
1065 1076 'pull_requests_global',
1066 1077 pull_request_id=pull_request.pull_request_id,)
1067 1078 else:
1068 1079 return request.route_url('pullrequest_show',
1069 1080 repo_name=safe_str(pull_request.target_repo.repo_name),
1070 1081 pull_request_id=pull_request.pull_request_id,)
1071 1082
1072 1083 def get_shadow_clone_url(self, pull_request, request=None):
1073 1084 """
1074 1085 Returns qualified url pointing to the shadow repository. If this pull
1075 1086 request is closed there is no shadow repository and ``None`` will be
1076 1087 returned.
1077 1088 """
1078 1089 if pull_request.is_closed():
1079 1090 return None
1080 1091 else:
1081 1092 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1082 1093 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1083 1094
1084 1095 def notify_reviewers(self, pull_request, reviewers_ids):
1085 1096 # notification to reviewers
1086 1097 if not reviewers_ids:
1087 1098 return
1088 1099
1089 1100 pull_request_obj = pull_request
1090 1101 # get the current participants of this pull request
1091 1102 recipients = reviewers_ids
1092 1103 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1093 1104
1094 1105 pr_source_repo = pull_request_obj.source_repo
1095 1106 pr_target_repo = pull_request_obj.target_repo
1096 1107
1097 1108 pr_url = h.route_url('pullrequest_show',
1098 1109 repo_name=pr_target_repo.repo_name,
1099 1110 pull_request_id=pull_request_obj.pull_request_id,)
1100 1111
1101 1112 # set some variables for email notification
1102 1113 pr_target_repo_url = h.route_url(
1103 1114 'repo_summary', repo_name=pr_target_repo.repo_name)
1104 1115
1105 1116 pr_source_repo_url = h.route_url(
1106 1117 'repo_summary', repo_name=pr_source_repo.repo_name)
1107 1118
1108 1119 # pull request specifics
1109 1120 pull_request_commits = [
1110 1121 (x.raw_id, x.message)
1111 1122 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1112 1123
1113 1124 kwargs = {
1114 1125 'user': pull_request.author,
1115 1126 'pull_request': pull_request_obj,
1116 1127 'pull_request_commits': pull_request_commits,
1117 1128
1118 1129 'pull_request_target_repo': pr_target_repo,
1119 1130 'pull_request_target_repo_url': pr_target_repo_url,
1120 1131
1121 1132 'pull_request_source_repo': pr_source_repo,
1122 1133 'pull_request_source_repo_url': pr_source_repo_url,
1123 1134
1124 1135 'pull_request_url': pr_url,
1125 1136 }
1126 1137
1127 1138 # pre-generate the subject for notification itself
1128 1139 (subject,
1129 1140 _h, _e, # we don't care about those
1130 1141 body_plaintext) = EmailNotificationModel().render_email(
1131 1142 notification_type, **kwargs)
1132 1143
1133 1144 # create notification objects, and emails
1134 1145 NotificationModel().create(
1135 1146 created_by=pull_request.author,
1136 1147 notification_subject=subject,
1137 1148 notification_body=body_plaintext,
1138 1149 notification_type=notification_type,
1139 1150 recipients=recipients,
1140 1151 email_kwargs=kwargs,
1141 1152 )
1142 1153
1143 1154 def delete(self, pull_request, user):
1144 1155 pull_request = self.__get_pull_request(pull_request)
1145 1156 old_data = pull_request.get_api_data(with_merge_state=False)
1146 1157 self._cleanup_merge_workspace(pull_request)
1147 1158 self._log_audit_action(
1148 1159 'repo.pull_request.delete', {'old_data': old_data},
1149 1160 user, pull_request)
1150 1161 Session().delete(pull_request)
1151 1162
1152 1163 def close_pull_request(self, pull_request, user):
1153 1164 pull_request = self.__get_pull_request(pull_request)
1154 1165 self._cleanup_merge_workspace(pull_request)
1155 1166 pull_request.status = PullRequest.STATUS_CLOSED
1156 1167 pull_request.updated_on = datetime.datetime.now()
1157 1168 Session().add(pull_request)
1158 1169 self.trigger_pull_request_hook(
1159 1170 pull_request, pull_request.author, 'close')
1160 1171
1161 1172 pr_data = pull_request.get_api_data(with_merge_state=False)
1162 1173 self._log_audit_action(
1163 1174 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1164 1175
1165 1176 def close_pull_request_with_comment(
1166 1177 self, pull_request, user, repo, message=None, auth_user=None):
1167 1178
1168 1179 pull_request_review_status = pull_request.calculated_review_status()
1169 1180
1170 1181 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1171 1182 # approved only if we have voting consent
1172 1183 status = ChangesetStatus.STATUS_APPROVED
1173 1184 else:
1174 1185 status = ChangesetStatus.STATUS_REJECTED
1175 1186 status_lbl = ChangesetStatus.get_status_lbl(status)
1176 1187
1177 1188 default_message = (
1178 1189 'Closing with status change {transition_icon} {status}.'
1179 1190 ).format(transition_icon='>', status=status_lbl)
1180 1191 text = message or default_message
1181 1192
1182 1193 # create a comment, and link it to new status
1183 1194 comment = CommentsModel().create(
1184 1195 text=text,
1185 1196 repo=repo.repo_id,
1186 1197 user=user.user_id,
1187 1198 pull_request=pull_request.pull_request_id,
1188 1199 status_change=status_lbl,
1189 1200 status_change_type=status,
1190 1201 closing_pr=True,
1191 1202 auth_user=auth_user,
1192 1203 )
1193 1204
1194 1205 # calculate old status before we change it
1195 1206 old_calculated_status = pull_request.calculated_review_status()
1196 1207 ChangesetStatusModel().set_status(
1197 1208 repo.repo_id,
1198 1209 status,
1199 1210 user.user_id,
1200 1211 comment=comment,
1201 1212 pull_request=pull_request.pull_request_id
1202 1213 )
1203 1214
1204 1215 Session().flush()
1205 1216 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1206 1217 # we now calculate the status of pull request again, and based on that
1207 1218 # calculation trigger status change. This might happen in cases
1208 1219 # that non-reviewer admin closes a pr, which means his vote doesn't
1209 1220 # change the status, while if he's a reviewer this might change it.
1210 1221 calculated_status = pull_request.calculated_review_status()
1211 1222 if old_calculated_status != calculated_status:
1212 1223 self.trigger_pull_request_hook(
1213 1224 pull_request, user, 'review_status_change',
1214 1225 data={'status': calculated_status})
1215 1226
1216 1227 # finally close the PR
1217 1228 PullRequestModel().close_pull_request(
1218 1229 pull_request.pull_request_id, user)
1219 1230
1220 1231 return comment, status
1221 1232
1222 1233 def merge_status(self, pull_request, translator=None,
1223 1234 force_shadow_repo_refresh=False):
1224 1235 _ = translator or get_current_request().translate
1225 1236
1226 1237 if not self._is_merge_enabled(pull_request):
1227 1238 return False, _('Server-side pull request merging is disabled.')
1228 1239 if pull_request.is_closed():
1229 1240 return False, _('This pull request is closed.')
1230 1241 merge_possible, msg = self._check_repo_requirements(
1231 1242 target=pull_request.target_repo, source=pull_request.source_repo,
1232 1243 translator=_)
1233 1244 if not merge_possible:
1234 1245 return merge_possible, msg
1235 1246
1236 1247 try:
1237 1248 resp = self._try_merge(
1238 1249 pull_request,
1239 1250 force_shadow_repo_refresh=force_shadow_repo_refresh)
1240 1251 log.debug("Merge response: %s", resp)
1241 1252 status = resp.possible, resp.merge_status_message
1242 1253 except NotImplementedError:
1243 1254 status = False, _('Pull request merging is not supported.')
1244 1255
1245 1256 return status
1246 1257
1247 1258 def _check_repo_requirements(self, target, source, translator):
1248 1259 """
1249 1260 Check if `target` and `source` have compatible requirements.
1250 1261
1251 1262 Currently this is just checking for largefiles.
1252 1263 """
1253 1264 _ = translator
1254 1265 target_has_largefiles = self._has_largefiles(target)
1255 1266 source_has_largefiles = self._has_largefiles(source)
1256 1267 merge_possible = True
1257 1268 message = u''
1258 1269
1259 1270 if target_has_largefiles != source_has_largefiles:
1260 1271 merge_possible = False
1261 1272 if source_has_largefiles:
1262 1273 message = _(
1263 1274 'Target repository large files support is disabled.')
1264 1275 else:
1265 1276 message = _(
1266 1277 'Source repository large files support is disabled.')
1267 1278
1268 1279 return merge_possible, message
1269 1280
1270 1281 def _has_largefiles(self, repo):
1271 1282 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1272 1283 'extensions', 'largefiles')
1273 1284 return largefiles_ui and largefiles_ui[0].active
1274 1285
1275 1286 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1276 1287 """
1277 1288 Try to merge the pull request and return the merge status.
1278 1289 """
1279 1290 log.debug(
1280 1291 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1281 1292 pull_request.pull_request_id, force_shadow_repo_refresh)
1282 1293 target_vcs = pull_request.target_repo.scm_instance()
1283 1294 # Refresh the target reference.
1284 1295 try:
1285 1296 target_ref = self._refresh_reference(
1286 1297 pull_request.target_ref_parts, target_vcs)
1287 1298 except CommitDoesNotExistError:
1288 1299 merge_state = MergeResponse(
1289 1300 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1290 1301 metadata={'target_ref': pull_request.target_ref_parts})
1291 1302 return merge_state
1292 1303
1293 1304 target_locked = pull_request.target_repo.locked
1294 1305 if target_locked and target_locked[0]:
1295 1306 locked_by = 'user:{}'.format(target_locked[0])
1296 1307 log.debug("The target repository is locked by %s.", locked_by)
1297 1308 merge_state = MergeResponse(
1298 1309 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1299 1310 metadata={'locked_by': locked_by})
1300 1311 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1301 1312 pull_request, target_ref):
1302 1313 log.debug("Refreshing the merge status of the repository.")
1303 1314 merge_state = self._refresh_merge_state(
1304 1315 pull_request, target_vcs, target_ref)
1305 1316 else:
1306 1317 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1307 1318 metadata = {
1308 1319 'target_ref': pull_request.target_ref_parts,
1309 'source_ref': pull_request.source_ref_parts
1320 'source_ref': pull_request.source_ref_parts,
1310 1321 }
1322 if not possible and target_ref.type == 'branch':
1323 # NOTE(marcink): case for mercurial multiple heads on branch
1324 heads = target_vcs._heads(target_ref.name)
1325 if len(heads) != 1:
1326 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1327 metadata.update({
1328 'heads': heads
1329 })
1311 1330 merge_state = MergeResponse(
1312 1331 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1313 1332
1314 1333 return merge_state
1315 1334
1316 1335 def _refresh_reference(self, reference, vcs_repository):
1317 1336 if reference.type in self.UPDATABLE_REF_TYPES:
1318 1337 name_or_id = reference.name
1319 1338 else:
1320 1339 name_or_id = reference.commit_id
1321 1340 refreshed_commit = vcs_repository.get_commit(name_or_id)
1322 1341 refreshed_reference = Reference(
1323 1342 reference.type, reference.name, refreshed_commit.raw_id)
1324 1343 return refreshed_reference
1325 1344
1326 1345 def _needs_merge_state_refresh(self, pull_request, target_reference):
1327 1346 return not(
1328 1347 pull_request.revisions and
1329 1348 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1330 1349 target_reference.commit_id == pull_request._last_merge_target_rev)
1331 1350
def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
    """
    Run a dry-run merge simulation for *pull_request* against
    *target_reference* and persist the outcome on the pull request row.

    Returns the MergeResponse produced by the VCS backend.
    """
    workspace_id = self._workspace_id(pull_request)
    source_vcs = pull_request.source_repo.scm_instance()
    repo_id = pull_request.target_repo.repo_id
    use_rebase = self._use_rebase_for_merging(pull_request)
    close_branch = self._close_branch_before_merging(pull_request)
    # dry_run=True: the merge runs in a shadow workspace and never
    # mutates the real target repository.
    merge_state = target_vcs.merge(
        repo_id, workspace_id,
        target_reference, source_vcs, pull_request.source_ref_parts,
        dry_run=True, use_rebase=use_rebase,
        close_branch=close_branch)

    # Do not store the response if there was an unknown error.
    if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
        # Cache the revs we simulated against so _needs_merge_state_refresh
        # can detect staleness later.
        pull_request._last_merge_source_rev = \
            pull_request.source_ref_parts.commit_id
        pull_request._last_merge_target_rev = target_reference.commit_id
        pull_request.last_merge_status = merge_state.failure_reason
        pull_request.shadow_merge_ref = merge_state.merge_ref
        Session().add(pull_request)
        Session().commit()

    return merge_state
1355 1374
1356 1375 def _workspace_id(self, pull_request):
1357 1376 workspace_id = 'pr-%s' % pull_request.pull_request_id
1358 1377 return workspace_id
1359 1378
def generate_repo_data(self, repo, commit_id=None, branch=None,
                       bookmark=None, translator=None):
    """
    Build the JSON-serializable structure describing *repo* that the
    pull-request UI consumes: owner info, repo name/link/description and
    the ref choices (including a select2-shaped variant).
    """
    from rhodecode.model.repo import RepoModel

    all_refs, selected_ref = \
        self._get_repo_pullrequest_sources(
            repo.scm_instance(), commit_id=commit_id,
            branch=branch, bookmark=bookmark, translator=translator)

    # Re-shape the (refs, group_name) pairs into select2 option groups.
    refs_select2 = []
    for element in all_refs:
        children = [{'id': x[0], 'text': x[1]} for x in element[0]]
        refs_select2.append({'text': element[1], 'children': children})

    return {
        'user': {
            'user_id': repo.user.user_id,
            'username': repo.user.username,
            'firstname': repo.user.first_name,
            'lastname': repo.user.last_name,
            # 14px avatar as used by the PR source/target selector widget
            'gravatar_link': h.gravatar_url(repo.user.email, 14),
        },
        'name': repo.repo_name,
        'link': RepoModel().get_url(repo),
        # only the first line of the description is shown in the selector
        'description': h.chop_at_smart(repo.description_safe, '\n'),
        'refs': {
            'all_refs': all_refs,
            'selected_ref': selected_ref,
            'select2_refs': refs_select2
        }
    }
1391 1410
def generate_pullrequest_title(self, source, source_ref, target):
    """Build the default pull request title: ``<source>#<ref> to <target>``."""
    return u'%s#%s to %s' % (source, source_ref, target)
1398 1417
1399 1418 def _cleanup_merge_workspace(self, pull_request):
1400 1419 # Merging related cleanup
1401 1420 repo_id = pull_request.target_repo.repo_id
1402 1421 target_scm = pull_request.target_repo.scm_instance()
1403 1422 workspace_id = self._workspace_id(pull_request)
1404 1423
1405 1424 try:
1406 1425 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1407 1426 except NotImplementedError:
1408 1427 pass
1409 1428
def _get_repo_pullrequest_sources(
        self, repo, commit_id=None, branch=None, bookmark=None,
        translator=None):
    """
    Return a structure with repo's interesting commits, suitable for
    the selectors in pullrequest controller

    :param commit_id: a commit that must be in the list somehow
        and selected by default
    :param branch: a branch that must be in the list and selected
        by default - even if closed
    :param bookmark: a bookmark that must be in the list and selected
    :param translator: optional translation function; falls back to the
        current request's translator when not given
    :returns: tuple ``(groups, selected)`` where groups is a list of
        ``([(ref_key, ref_name), ...], group_label)`` pairs
    :raises CommitDoesNotExistError: a requested ref could not be matched
    :raises EmptyRepositoryError: the repo has no refs and no commits
    """
    _ = translator or get_current_request().translate

    commit_id = safe_str(commit_id) if commit_id else None
    branch = safe_unicode(branch) if branch else None
    bookmark = safe_unicode(bookmark) if bookmark else None

    selected = None

    # order matters: first source that has commit_id in it will be selected
    sources = []
    sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
    sources.append(('branch', repo.branches.items(), _('Branches'), branch))

    if commit_id:
        ref_commit = (h.short_id(commit_id), commit_id)
        sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

    sources.append(
        ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
    )

    groups = []

    for group_key, ref_list, group_name, match in sources:
        group_refs = []
        for ref_name, ref_id in ref_list:
            # ref_key format mirrors the '<type>:<name>:<id>' convention
            # used by the selected-default fallback below
            ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
            group_refs.append((ref_key, ref_name))

            if not selected:
                # match either the explicit commit_id or the requested
                # branch/bookmark name against this ref
                if set([commit_id, match]) & set([ref_id, ref_name]):
                    selected = ref_key

        if group_refs:
            groups.append((group_refs, group_name))

    if not selected:
        ref = commit_id or branch or bookmark
        if ref:
            raise CommitDoesNotExistError(
                u'No commit refs could be found matching: {}'.format(ref))
        elif repo.DEFAULT_BRANCH_NAME in repo.branches:
            selected = u'branch:{}:{}'.format(
                safe_unicode(repo.DEFAULT_BRANCH_NAME),
                safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
            )
        elif repo.commit_ids:
            # make the user select in this case
            selected = None
        else:
            raise EmptyRepositoryError()
    return groups, selected
1475 1494
def get_diff(self, source_repo, source_ref_id, target_ref_id,
             hide_whitespace_changes, diff_context):
    """Public entry point; delegates to ``_get_diff_from_pr_or_version``."""
    options = dict(
        hide_whitespace_changes=hide_whitespace_changes,
        diff_context=diff_context)
    return self._get_diff_from_pr_or_version(
        source_repo, source_ref_id, target_ref_id, **options)
1482 1501
def _get_diff_from_pr_or_version(
        self, source_repo, source_ref_id, target_ref_id,
        hide_whitespace_changes, diff_context):
    """
    Compute the diff between *target_ref_id* and *source_ref_id* inside
    *source_repo* (accepted either as a DB ``Repository`` or a vcs repo
    instance).
    """
    target_commit = source_repo.get_commit(
        commit_id=safe_str(target_ref_id))
    source_commit = source_repo.get_commit(
        commit_id=safe_str(source_ref_id))
    if isinstance(source_repo, Repository):
        vcs_repo = source_repo.scm_instance()
    else:
        vcs_repo = source_repo

    # TODO: johbo: In the context of an update, we cannot reach
    # the old commit anymore with our normal mechanisms. It needs
    # some sort of special support in the vcs layer to avoid this
    # workaround.
    if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
            vcs_repo.alias == 'git'):
        source_commit.raw_id = safe_str(source_ref_id)

    log.debug('calculating diff between '
              'source_ref:%s and target_ref:%s for repo `%s`',
              target_ref_id, source_ref_id,
              safe_unicode(vcs_repo.path))

    vcs_diff = vcs_repo.get_diff(
        commit1=target_commit, commit2=source_commit,
        ignore_whitespace=hide_whitespace_changes, context=diff_context)
    return vcs_diff
1513 1532
1514 1533 def _is_merge_enabled(self, pull_request):
1515 1534 return self._get_general_setting(
1516 1535 pull_request, 'rhodecode_pr_merge_enabled')
1517 1536
1518 1537 def _use_rebase_for_merging(self, pull_request):
1519 1538 repo_type = pull_request.target_repo.repo_type
1520 1539 if repo_type == 'hg':
1521 1540 return self._get_general_setting(
1522 1541 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1523 1542 elif repo_type == 'git':
1524 1543 return self._get_general_setting(
1525 1544 pull_request, 'rhodecode_git_use_rebase_for_merging')
1526 1545
1527 1546 return False
1528 1547
1529 1548 def _close_branch_before_merging(self, pull_request):
1530 1549 repo_type = pull_request.target_repo.repo_type
1531 1550 if repo_type == 'hg':
1532 1551 return self._get_general_setting(
1533 1552 pull_request, 'rhodecode_hg_close_branch_before_merging')
1534 1553 elif repo_type == 'git':
1535 1554 return self._get_general_setting(
1536 1555 pull_request, 'rhodecode_git_close_branch_before_merging')
1537 1556
1538 1557 return False
1539 1558
def _get_general_setting(self, pull_request, settings_key, default=False):
    """Fetch one general VCS setting scoped to the PR's target repository."""
    model = VcsSettingsModel(repo=pull_request.target_repo)
    general_settings = model.get_general_settings()
    return general_settings.get(settings_key, default)
1544 1563
def _log_audit_action(self, action, action_data, user, pull_request):
    """Record an audit-log entry scoped to the PR's target repository."""
    audit_logger.store(
        action=action, action_data=action_data,
        user=user, repo=pull_request.target_repo)
1551 1570
def get_reviewer_functions(self):
    """
    Fetches functions for validation and fetching default reviewers.
    If available we use the EE package, else we fallback to CE
    package functions
    """
    try:
        # Enterprise Edition implementation, present only when the
        # rc_reviewers add-on package is installed
        from rc_reviewers.utils import get_default_reviewers_data
        from rc_reviewers.utils import validate_default_reviewers
    except ImportError:
        # Community Edition fallback shipped with this code base
        from rhodecode.apps.repository.utils import get_default_reviewers_data
        from rhodecode.apps.repository.utils import validate_default_reviewers

    return get_default_reviewers_data, validate_default_reviewers
1566 1585
1567 1586
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    # keys under which individual check failures are stored in error_details
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'

    def __init__(self):
        self.review_status = None
        self.merge_possible = None
        self.merge_msg = ''
        self.failed = None
        self.errors = []
        # ordered so errors render in the sequence the checks ran
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        """Record one failed check; marks the whole check object as failed."""
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run all merge checks (permission, branch rules, review status,
        unresolved TODOs, merge simulation) for *pull_request*.

        :param fail_early: return after the first failing check instead of
            collecting all failures
        :param force_shadow_repo_refresh: force re-running the shadow-repo
            merge simulation
        :returns: a populated MergeCheck instance
        """
        _ = translator
        merge_check = cls()

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(
            pull_request, auth_user)
        if not user_allowed_to_merge:
            # BUGFIX: previously logged the copy-pasted "approval is
            # pending." message, misattributing a permission failure.
            log.debug("MergeCheck: cannot merge, user is not allowed to merge.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)
        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """Describe how the merge will be performed (strategy, branch close)."""
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed after merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
1714 1733
1715 1734
# Summary of commit-level changes between two pull request versions.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', 'added common removed total')

# Summary of file-level changes between two pull request versions.
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', 'added modified removed')
@@ -1,845 +1,888 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import hashlib
23 23 import logging
24 24 from collections import namedtuple
25 25 from functools import wraps
26 26 import bleach
27 27
28 28 from rhodecode.lib import rc_cache
29 29 from rhodecode.lib.utils2 import (
30 30 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
31 31 from rhodecode.lib.vcs.backends import base
32 32 from rhodecode.model import BaseModel
33 33 from rhodecode.model.db import (
34 34 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting, CacheKey)
35 35 from rhodecode.model.meta import Session
36 36
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 40
# Lightweight, immutable view of one row from the *_ui settings tables.
UiSetting = namedtuple('UiSetting', 'section key value active')

# Auth plugins exposed as "social login" options in the UI.
SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
45 45
46 46
class SettingNotFound(Exception):
    """Raised when a setting looked up by id does not exist."""

    def __init__(self, setting_id):
        super(SettingNotFound, self).__init__(
            'Setting `{}` is not found'.format(setting_id))
51 51
52 52
class SettingsModel(BaseModel):
    """
    Access layer for RhodeCode settings and *_ui rows, optionally scoped
    to a single repository (when ``repo`` is given, the Repo* DB models
    are used instead of the global ones).
    """
    BUILTIN_HOOKS = (
        RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
        RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
        RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
        RhodeCodeUi.HOOK_PUSH_KEY,)
    HOOKS_SECTION = 'hooks'

    def __init__(self, sa=None, repo=None):
        # repo=None selects the global settings tables
        self.repo = repo
        self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
        self.SettingsDbModel = (
            RepoRhodeCodeSetting if repo else RhodeCodeSetting)
        super(SettingsModel, self).__init__(sa)

    def get_ui_by_key(self, key):
        """Return the single ui row with *key*, scoped to self.repo if set."""
        q = self.UiDbModel.query()
        q = q.filter(self.UiDbModel.ui_key == key)
        q = self._filter_by_repo(RepoRhodeCodeUi, q)
        return q.scalar()

    def get_ui_by_section(self, section):
        """Return all ui rows in *section*, scoped to self.repo if set."""
        q = self.UiDbModel.query()
        q = q.filter(self.UiDbModel.ui_section == section)
        q = self._filter_by_repo(RepoRhodeCodeUi, q)
        return q.all()

    def get_ui_by_section_and_key(self, section, key):
        """Return the single ui row matching *section* and *key*."""
        q = self.UiDbModel.query()
        q = q.filter(self.UiDbModel.ui_section == section)
        q = q.filter(self.UiDbModel.ui_key == key)
        q = self._filter_by_repo(RepoRhodeCodeUi, q)
        return q.scalar()

    def get_ui(self, section=None, key=None):
        """
        Return ui rows as UiSetting namedtuples, optionally filtered by
        *section* and/or *key*.
        """
        q = self.UiDbModel.query()
        q = self._filter_by_repo(RepoRhodeCodeUi, q)

        if section:
            q = q.filter(self.UiDbModel.ui_section == section)
        if key:
            q = q.filter(self.UiDbModel.ui_key == key)

        # TODO: mikhail: add caching
        result = [
            UiSetting(
                section=safe_str(r.ui_section), key=safe_str(r.ui_key),
                value=safe_str(r.ui_value), active=r.ui_active
            )
            for r in q.all()
        ]
        return result

    def get_builtin_hooks(self):
        """Return hook rows whose keys are known built-in hooks."""
        q = self.UiDbModel.query()
        q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
        return self._get_hooks(q)

    def get_custom_hooks(self):
        """Return hook rows that are NOT built-in hooks."""
        q = self.UiDbModel.query()
        q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
        return self._get_hooks(q)

    def create_ui_section_value(self, section, val, key=None, active=True):
        """
        Add a new ui row; when *key* is omitted a sha1 of section+value
        (+repo id when repo-scoped) is generated, since keys must be unique.
        """
        new_ui = self.UiDbModel()
        new_ui.ui_section = section
        new_ui.ui_value = val
        new_ui.ui_active = active

        repository_id = ''
        if self.repo:
            repo = self._get_repo(self.repo)
            repository_id = repo.repo_id
            new_ui.repository_id = repository_id

        if not key:
            # keys are unique so they need appended info
            if self.repo:
                key = hashlib.sha1(
                    '{}{}{}'.format(section, val, repository_id)).hexdigest()
            else:
                key = hashlib.sha1('{}{}'.format(section, val)).hexdigest()

        new_ui.ui_key = key

        Session().add(new_ui)
        return new_ui

    def create_or_update_hook(self, key, value):
        """Upsert a hook row in the hooks section; always marked active."""
        ui = (
            self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
            self.UiDbModel())
        ui.ui_section = self.HOOKS_SECTION
        ui.ui_active = True
        ui.ui_key = key
        ui.ui_value = value

        if self.repo:
            repo = self._get_repo(self.repo)
            repository_id = repo.repo_id
            ui.repository_id = repository_id

        Session().add(ui)
        return ui

    def delete_ui(self, id_):
        """Delete the ui row with primary key *id_*.

        :raises SettingNotFound: when no such row exists
        """
        ui = self.UiDbModel.get(id_)
        if not ui:
            raise SettingNotFound(id_)
        Session().delete(ui)

    def get_setting_by_name(self, name):
        """Return the settings row named *name*, scoped to self.repo if set."""
        q = self._get_settings_query()
        q = q.filter(self.SettingsDbModel.app_settings_name == name)
        return q.scalar()

    def create_or_update_setting(
            self, name, val=Optional(''), type_=Optional('unicode')):
        """
        Creates or updates RhodeCode setting. If updates is triggered it will
        only update parameters that are explicitly set; Optional instance will
        be skipped

        :param name:
        :param val:
        :param type_:
        :return:
        """

        res = self.get_setting_by_name(name)
        repo = self._get_repo(self.repo) if self.repo else None

        if not res:
            val = Optional.extract(val)
            type_ = Optional.extract(type_)

            args = (
                (repo.repo_id, name, val, type_)
                if repo else (name, val, type_))
            res = self.SettingsDbModel(*args)

        else:
            if self.repo:
                res.repository_id = repo.repo_id

            res.app_settings_name = name
            if not isinstance(type_, Optional):
                # update if set
                res.app_settings_type = type_
            if not isinstance(val, Optional):
                # update if set
                res.app_settings_value = val

        Session().add(res)
        return res

    def invalidate_settings_cache(self):
        """Signal all setting caches to refresh on next read."""
        invalidation_namespace = CacheKey.SETTINGS_INVALIDATION_NAMESPACE
        CacheKey.set_invalidate(invalidation_namespace)

    def get_all_settings(self, cache=False):
        """
        Return all settings as a ``{'rhodecode_<name>': value}`` dict,
        optionally served from (and maintained in) the short SQL cache.
        """
        region = rc_cache.get_or_create_region('sql_cache_short')
        invalidation_namespace = CacheKey.SETTINGS_INVALIDATION_NAMESPACE

        @region.conditional_cache_on_arguments(condition=cache)
        def _get_all_settings(name, key):
            q = self._get_settings_query()
            if not q:
                raise Exception('Could not get application settings !')

            settings = {
                'rhodecode_' + result.app_settings_name: result.app_settings_value
                for result in q
            }
            return settings

        repo = self._get_repo(self.repo) if self.repo else None
        # per-repo settings cache under a repo-specific key
        key = "settings_repo.{}".format(repo.repo_id) if repo else "settings_app"

        inv_context_manager = rc_cache.InvalidationContext(
            uid='cache_settings', invalidation_namespace=invalidation_namespace)
        with inv_context_manager as invalidation_context:
            # check for stored invalidation signal, and maybe purge the cache
            # before computing it again
            if invalidation_context.should_invalidate():
                # NOTE:(marcink) we flush the whole sql_cache_short region, because it
                # reads different settings etc. It's little too much but those caches
                # are anyway very short lived and it's a safest way.
                region = rc_cache.get_or_create_region('sql_cache_short')
                region.invalidate()

            result = _get_all_settings('rhodecode_settings', key)
            log.debug('Fetching app settings for key: %s took: %.3fs', key,
                      inv_context_manager.compute_time)

        return result

    def get_auth_settings(self):
        """Return all ``auth_*`` settings as a name->value dict."""
        q = self._get_settings_query()
        q = q.filter(
            self.SettingsDbModel.app_settings_name.startswith('auth_'))
        rows = q.all()
        auth_settings = {
            row.app_settings_name: row.app_settings_value for row in rows}
        return auth_settings

    def get_auth_plugins(self):
        """Return the raw value of the ``auth_plugins`` setting."""
        auth_plugins = self.get_setting_by_name("auth_plugins")
        return auth_plugins.app_settings_value

    def get_default_repo_settings(self, strip_prefix=False):
        """
        Return all ``default_*`` settings; with *strip_prefix* the
        ``default_`` prefix is removed from the keys.
        """
        q = self._get_settings_query()
        q = q.filter(
            self.SettingsDbModel.app_settings_name.startswith('default_'))
        rows = q.all()

        result = {}
        for row in rows:
            key = row.app_settings_name
            if strip_prefix:
                key = remove_prefix(key, prefix='default_')
            result.update({key: row.app_settings_value})
        return result

    def get_repo(self):
        """Resolve self.repo to its DB row; raise when it does not exist."""
        repo = self._get_repo(self.repo)
        if not repo:
            raise Exception(
                'Repository `{}` cannot be found inside the database'.format(
                    self.repo))
        return repo

    def _filter_by_repo(self, model, query):
        # narrow *query* to the bound repository, if any
        if self.repo:
            repo = self.get_repo()
            query = query.filter(model.repository_id == repo.repo_id)
        return query

    def _get_hooks(self, query):
        # restrict to the hooks section and the bound repository
        query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
        query = self._filter_by_repo(RepoRhodeCodeUi, query)
        return query.all()

    def _get_settings_query(self):
        q = self.SettingsDbModel.query()
        return self._filter_by_repo(RepoRhodeCodeSetting, q)

    def list_enabled_social_plugins(self, settings):
        """Return the social plugins enabled in the given settings dict."""
        enabled = []
        for plug in SOCIAL_PLUGINS_LIST:
            if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug)
                                     )):
                enabled.append(plug)
        return enabled
306 307
307 308
def assert_repo_settings(func):
    """Decorator: guard methods that require a repository-bound model."""
    @wraps(func)
    def _checked(self, *args, **kwargs):
        if self.repo_settings:
            return func(self, *args, **kwargs)
        raise Exception('Repository is not specified')
    return _checked
315 316
316 317
class IssueTrackerSettingsModel(object):
    """
    Manage issue-tracker pattern entries, stored as flat settings named
    ``issuetracker_<field>_<uid>`` either globally or per repository
    (repo entries can inherit the global ones).
    """
    INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
    SETTINGS_PREFIX = 'issuetracker_'

    def __init__(self, sa=None, repo=None):
        self.global_settings = SettingsModel(sa=sa)
        # repo-scoped model only exists when a repo was given
        self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None

    @property
    def inherit_global_settings(self):
        # without a repo scope there is nothing to override; defaults to True
        if not self.repo_settings:
            return True
        setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
        return setting.app_settings_value if setting else True

    @inherit_global_settings.setter
    def inherit_global_settings(self, value):
        if self.repo_settings:
            settings = self.repo_settings.create_or_update_setting(
                self.INHERIT_SETTINGS, value, type_='bool')
            Session().add(settings)

    def _get_keyname(self, key, uid, prefix=''):
        # e.g. _get_keyname('pat', 'abc', 'rhodecode_')
        #   -> 'rhodecode_issuetracker_pat_abc'
        return '{0}{1}{2}_{3}'.format(
            prefix, self.SETTINGS_PREFIX, key, uid)

    def _make_dict_for_settings(self, qs):
        """Group the flat settings dict *qs* into per-uid tracker entries."""
        prefix_match = self._get_keyname('pat', '', 'rhodecode_')

        issuetracker_entries = {}
        # create keys
        for k, v in qs.items():
            if k.startswith(prefix_match):
                uid = k[len(prefix_match):]
                issuetracker_entries[uid] = None

        def url_cleaner(input_str):
            # strip quotes and any markup from stored URLs before display
            input_str = input_str.replace('"', '').replace("'", '')
            input_str = bleach.clean(input_str, strip=True)
            return input_str

        # populate
        for uid in issuetracker_entries:
            url_data = qs.get(self._get_keyname('url', uid, 'rhodecode_'))

            issuetracker_entries[uid] = AttributeDict({
                'pat': qs.get(
                    self._get_keyname('pat', uid, 'rhodecode_')),
                'url': url_cleaner(
                    qs.get(self._get_keyname('url', uid, 'rhodecode_')) or ''),
                'pref': bleach.clean(
                    qs.get(self._get_keyname('pref', uid, 'rhodecode_')) or ''),
                'desc': qs.get(
                    self._get_keyname('desc', uid, 'rhodecode_')),
            })

        return issuetracker_entries

    def get_global_settings(self, cache=False):
        """
        Returns list of global issue tracker settings
        """
        defaults = self.global_settings.get_all_settings(cache=cache)
        settings = self._make_dict_for_settings(defaults)
        return settings

    def get_repo_settings(self, cache=False):
        """
        Returns list of issue tracker settings per repository
        """
        if not self.repo_settings:
            raise Exception('Repository is not specified')
        all_settings = self.repo_settings.get_all_settings(cache=cache)
        settings = self._make_dict_for_settings(all_settings)
        return settings

    def get_settings(self, cache=False):
        """Return repo settings, or the global ones when inheriting."""
        if self.inherit_global_settings:
            return self.get_global_settings(cache=cache)
        else:
            return self.get_repo_settings(cache=cache)

    def delete_entries(self, uid):
        """Delete every stored field of the tracker entry identified by *uid*."""
        if self.repo_settings:
            all_patterns = self.get_repo_settings()
            settings_model = self.repo_settings
        else:
            all_patterns = self.get_global_settings()
            settings_model = self.global_settings
        entries = all_patterns.get(uid, [])

        # iterate the entry's field names (pat/url/pref/desc) and delete each
        for del_key in entries:
            setting_name = self._get_keyname(del_key, uid)
            entry = settings_model.get_setting_by_name(setting_name)
            if entry:
                Session().delete(entry)

        Session().commit()

    def create_or_update_setting(
            self, name, val=Optional(''), type_=Optional('unicode')):
        """Upsert *name* in the repo scope when bound, else globally."""
        if self.repo_settings:
            setting = self.repo_settings.create_or_update_setting(
                name, val, type_)
        else:
            setting = self.global_settings.create_or_update_setting(
                name, val, type_)
        return setting
425 426
426 427
427 428 class VcsSettingsModel(object):
428 429
429 430 INHERIT_SETTINGS = 'inherit_vcs_settings'
430 431 GENERAL_SETTINGS = (
431 432 'use_outdated_comments',
432 433 'pr_merge_enabled',
433 434 'hg_use_rebase_for_merging',
434 435 'hg_close_branch_before_merging',
435 436 'git_use_rebase_for_merging',
436 437 'git_close_branch_before_merging',
437 438 'diff_cache',
438 439 )
439 440
440 441 HOOKS_SETTINGS = (
441 442 ('hooks', 'changegroup.repo_size'),
442 443 ('hooks', 'changegroup.push_logger'),
443 ('hooks', 'outgoing.pull_logger'),)
444 ('hooks', 'outgoing.pull_logger'),
445 )
444 446 HG_SETTINGS = (
445 447 ('extensions', 'largefiles'),
446 448 ('phases', 'publish'),
447 ('extensions', 'evolve'),)
449 ('extensions', 'evolve'),
450 ('extensions', 'topic'),
451 ('experimental', 'evolution'),
452 ('experimental', 'evolution.exchange'),
453 )
448 454 GIT_SETTINGS = (
449 ('vcs_git_lfs', 'enabled'),)
455 ('vcs_git_lfs', 'enabled'),
456 )
450 457 GLOBAL_HG_SETTINGS = (
451 458 ('extensions', 'largefiles'),
452 459 ('largefiles', 'usercache'),
453 460 ('phases', 'publish'),
454 461 ('extensions', 'hgsubversion'),
455 ('extensions', 'evolve'),)
462 ('extensions', 'evolve'),
463 ('extensions', 'topic'),
464 ('experimental', 'evolution'),
465 ('experimental', 'evolution.exchange'),
466 )
467
456 468 GLOBAL_GIT_SETTINGS = (
457 469 ('vcs_git_lfs', 'enabled'),
458 ('vcs_git_lfs', 'store_location'))
470 ('vcs_git_lfs', 'store_location')
471 )
459 472
460 473 GLOBAL_SVN_SETTINGS = (
461 474 ('vcs_svn_proxy', 'http_requests_enabled'),
462 ('vcs_svn_proxy', 'http_server_url'))
475 ('vcs_svn_proxy', 'http_server_url')
476 )
463 477
464 478 SVN_BRANCH_SECTION = 'vcs_svn_branch'
465 479 SVN_TAG_SECTION = 'vcs_svn_tag'
466 480 SSL_SETTING = ('web', 'push_ssl')
467 481 PATH_SETTING = ('paths', '/')
468 482
469 483 def __init__(self, sa=None, repo=None):
470 484 self.global_settings = SettingsModel(sa=sa)
471 485 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
472 486 self._ui_settings = (
473 487 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
474 488 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
475 489
476 490 @property
477 491 @assert_repo_settings
478 492 def inherit_global_settings(self):
479 493 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
480 494 return setting.app_settings_value if setting else True
481 495
482 496 @inherit_global_settings.setter
483 497 @assert_repo_settings
484 498 def inherit_global_settings(self, value):
485 499 self.repo_settings.create_or_update_setting(
486 500 self.INHERIT_SETTINGS, value, type_='bool')
487 501
488 502 def get_global_svn_branch_patterns(self):
489 503 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
490 504
491 505 @assert_repo_settings
492 506 def get_repo_svn_branch_patterns(self):
493 507 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
494 508
495 509 def get_global_svn_tag_patterns(self):
496 510 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
497 511
498 512 @assert_repo_settings
499 513 def get_repo_svn_tag_patterns(self):
500 514 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
501 515
502 516 def get_global_settings(self):
503 517 return self._collect_all_settings(global_=True)
504 518
505 519 @assert_repo_settings
506 520 def get_repo_settings(self):
507 521 return self._collect_all_settings(global_=False)
508 522
509 523 @assert_repo_settings
510 524 def create_or_update_repo_settings(
511 525 self, data, inherit_global_settings=False):
512 526 from rhodecode.model.scm import ScmModel
513 527
514 528 self.inherit_global_settings = inherit_global_settings
515 529
516 530 repo = self.repo_settings.get_repo()
517 531 if not inherit_global_settings:
518 532 if repo.repo_type == 'svn':
519 533 self.create_repo_svn_settings(data)
520 534 else:
521 535 self.create_or_update_repo_hook_settings(data)
522 536 self.create_or_update_repo_pr_settings(data)
523 537
524 538 if repo.repo_type == 'hg':
525 539 self.create_or_update_repo_hg_settings(data)
526 540
527 541 if repo.repo_type == 'git':
528 542 self.create_or_update_repo_git_settings(data)
529 543
530 544 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
531 545
532 546 @assert_repo_settings
533 547 def create_or_update_repo_hook_settings(self, data):
534 548 for section, key in self.HOOKS_SETTINGS:
535 549 data_key = self._get_form_ui_key(section, key)
536 550 if data_key not in data:
537 551 raise ValueError(
538 552 'The given data does not contain {} key'.format(data_key))
539 553
540 554 active = data.get(data_key)
541 555 repo_setting = self.repo_settings.get_ui_by_section_and_key(
542 556 section, key)
543 557 if not repo_setting:
544 558 global_setting = self.global_settings.\
545 559 get_ui_by_section_and_key(section, key)
546 560 self.repo_settings.create_ui_section_value(
547 561 section, global_setting.ui_value, key=key, active=active)
548 562 else:
549 563 repo_setting.ui_active = active
550 564 Session().add(repo_setting)
551 565
552 566 def update_global_hook_settings(self, data):
553 567 for section, key in self.HOOKS_SETTINGS:
554 568 data_key = self._get_form_ui_key(section, key)
555 569 if data_key not in data:
556 570 raise ValueError(
557 571 'The given data does not contain {} key'.format(data_key))
558 572 active = data.get(data_key)
559 573 repo_setting = self.global_settings.get_ui_by_section_and_key(
560 574 section, key)
561 575 repo_setting.ui_active = active
562 576 Session().add(repo_setting)
563 577
564 578 @assert_repo_settings
565 579 def create_or_update_repo_pr_settings(self, data):
566 580 return self._create_or_update_general_settings(
567 581 self.repo_settings, data)
568 582
569 583 def create_or_update_global_pr_settings(self, data):
570 584 return self._create_or_update_general_settings(
571 585 self.global_settings, data)
572 586
573 587 @assert_repo_settings
574 588 def create_repo_svn_settings(self, data):
575 589 return self._create_svn_settings(self.repo_settings, data)
576 590
591 def _set_evolution(self, settings, is_enabled):
592 if is_enabled:
593 # if evolve is active set evolution=all
594
595 self._create_or_update_ui(
596 settings, *('experimental', 'evolution'), value='all',
597 active=True)
598 self._create_or_update_ui(
599 settings, *('experimental', 'evolution.exchange'), value='yes',
600 active=True)
601 # if evolve is active set topics server support
602 self._create_or_update_ui(
603 settings, *('extensions', 'topic'), value='',
604 active=True)
605
606 else:
607 self._create_or_update_ui(
608 settings, *('experimental', 'evolution'), value='',
609 active=False)
610 self._create_or_update_ui(
611 settings, *('experimental', 'evolution.exchange'), value='no',
612 active=False)
613 self._create_or_update_ui(
614 settings, *('extensions', 'topic'), value='',
615 active=False)
616
577 617 @assert_repo_settings
578 618 def create_or_update_repo_hg_settings(self, data):
579 619 largefiles, phases, evolve = \
580 self.HG_SETTINGS
620 self.HG_SETTINGS[:3]
581 621 largefiles_key, phases_key, evolve_key = \
582 self._get_settings_keys(self.HG_SETTINGS, data)
622 self._get_settings_keys(self.HG_SETTINGS[:3], data)
583 623
584 624 self._create_or_update_ui(
585 625 self.repo_settings, *largefiles, value='',
586 626 active=data[largefiles_key])
587 627 self._create_or_update_ui(
588 628 self.repo_settings, *evolve, value='',
589 629 active=data[evolve_key])
630 self._set_evolution(self.repo_settings, is_enabled=data[evolve_key])
631
590 632 self._create_or_update_ui(
591 633 self.repo_settings, *phases, value=safe_str(data[phases_key]))
592 634
593 635 def create_or_update_global_hg_settings(self, data):
594 636 largefiles, largefiles_store, phases, hgsubversion, evolve \
595 = self.GLOBAL_HG_SETTINGS
637 = self.GLOBAL_HG_SETTINGS[:5]
596 638 largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \
597 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS, data)
639 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:5], data)
598 640
599 641 self._create_or_update_ui(
600 642 self.global_settings, *largefiles, value='',
601 643 active=data[largefiles_key])
602 644 self._create_or_update_ui(
603 self.global_settings, *largefiles_store,
604 value=data[largefiles_store_key])
645 self.global_settings, *largefiles_store, value=data[largefiles_store_key])
605 646 self._create_or_update_ui(
606 647 self.global_settings, *phases, value=safe_str(data[phases_key]))
607 648 self._create_or_update_ui(
608 649 self.global_settings, *hgsubversion, active=data[subversion_key])
609 650 self._create_or_update_ui(
610 651 self.global_settings, *evolve, value='',
611 652 active=data[evolve_key])
653 self._set_evolution(self.global_settings, is_enabled=data[evolve_key])
612 654
613 655 def create_or_update_repo_git_settings(self, data):
614 # NOTE(marcink): # comma make unpack work properly
656 # NOTE(marcink): # comma makes unpack work properly
615 657 lfs_enabled, \
616 658 = self.GIT_SETTINGS
617 659
618 660 lfs_enabled_key, \
619 661 = self._get_settings_keys(self.GIT_SETTINGS, data)
620 662
621 663 self._create_or_update_ui(
622 664 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
623 665 active=data[lfs_enabled_key])
624 666
625 667 def create_or_update_global_git_settings(self, data):
626 668 lfs_enabled, lfs_store_location \
627 669 = self.GLOBAL_GIT_SETTINGS
628 670 lfs_enabled_key, lfs_store_location_key \
629 671 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
630 672
631 673 self._create_or_update_ui(
632 674 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
633 675 active=data[lfs_enabled_key])
634 676 self._create_or_update_ui(
635 677 self.global_settings, *lfs_store_location,
636 678 value=data[lfs_store_location_key])
637 679
638 680 def create_or_update_global_svn_settings(self, data):
639 681 # branch/tags patterns
640 682 self._create_svn_settings(self.global_settings, data)
641 683
642 684 http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
643 685 http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
644 686 self.GLOBAL_SVN_SETTINGS, data)
645 687
646 688 self._create_or_update_ui(
647 689 self.global_settings, *http_requests_enabled,
648 690 value=safe_str(data[http_requests_enabled_key]))
649 691 self._create_or_update_ui(
650 692 self.global_settings, *http_server_url,
651 693 value=data[http_server_url_key])
652 694
653 695 def update_global_ssl_setting(self, value):
654 696 self._create_or_update_ui(
655 697 self.global_settings, *self.SSL_SETTING, value=value)
656 698
657 699 def update_global_path_setting(self, value):
658 700 self._create_or_update_ui(
659 701 self.global_settings, *self.PATH_SETTING, value=value)
660 702
661 703 @assert_repo_settings
662 704 def delete_repo_svn_pattern(self, id_):
663 705 ui = self.repo_settings.UiDbModel.get(id_)
664 706 if ui and ui.repository.repo_name == self.repo_settings.repo:
665 707 # only delete if it's the same repo as initialized settings
666 708 self.repo_settings.delete_ui(id_)
667 709 else:
668 710 # raise error as if we wouldn't find this option
669 711 self.repo_settings.delete_ui(-1)
670 712
671 713 def delete_global_svn_pattern(self, id_):
672 714 self.global_settings.delete_ui(id_)
673 715
674 716 @assert_repo_settings
675 717 def get_repo_ui_settings(self, section=None, key=None):
676 718 global_uis = self.global_settings.get_ui(section, key)
677 719 repo_uis = self.repo_settings.get_ui(section, key)
720
678 721 filtered_repo_uis = self._filter_ui_settings(repo_uis)
679 722 filtered_repo_uis_keys = [
680 723 (s.section, s.key) for s in filtered_repo_uis]
681 724
682 725 def _is_global_ui_filtered(ui):
683 726 return (
684 727 (ui.section, ui.key) in filtered_repo_uis_keys
685 728 or ui.section in self._svn_sections)
686 729
687 730 filtered_global_uis = [
688 731 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
689 732
690 733 return filtered_global_uis + filtered_repo_uis
691 734
692 735 def get_global_ui_settings(self, section=None, key=None):
693 736 return self.global_settings.get_ui(section, key)
694 737
695 738 def get_ui_settings_as_config_obj(self, section=None, key=None):
696 739 config = base.Config()
697 740
698 741 ui_settings = self.get_ui_settings(section=section, key=key)
699 742
700 743 for entry in ui_settings:
701 744 config.set(entry.section, entry.key, entry.value)
702 745
703 746 return config
704 747
705 748 def get_ui_settings(self, section=None, key=None):
706 749 if not self.repo_settings or self.inherit_global_settings:
707 750 return self.get_global_ui_settings(section, key)
708 751 else:
709 752 return self.get_repo_ui_settings(section, key)
710 753
711 754 def get_svn_patterns(self, section=None):
712 755 if not self.repo_settings:
713 756 return self.get_global_ui_settings(section)
714 757 else:
715 758 return self.get_repo_ui_settings(section)
716 759
717 760 @assert_repo_settings
718 761 def get_repo_general_settings(self):
719 762 global_settings = self.global_settings.get_all_settings()
720 763 repo_settings = self.repo_settings.get_all_settings()
721 764 filtered_repo_settings = self._filter_general_settings(repo_settings)
722 765 global_settings.update(filtered_repo_settings)
723 766 return global_settings
724 767
725 768 def get_global_general_settings(self):
726 769 return self.global_settings.get_all_settings()
727 770
728 771 def get_general_settings(self):
729 772 if not self.repo_settings or self.inherit_global_settings:
730 773 return self.get_global_general_settings()
731 774 else:
732 775 return self.get_repo_general_settings()
733 776
734 777 def get_repos_location(self):
735 778 return self.global_settings.get_ui_by_key('/').ui_value
736 779
737 780 def _filter_ui_settings(self, settings):
738 781 filtered_settings = [
739 782 s for s in settings if self._should_keep_setting(s)]
740 783 return filtered_settings
741 784
742 785 def _should_keep_setting(self, setting):
743 786 keep = (
744 787 (setting.section, setting.key) in self._ui_settings or
745 788 setting.section in self._svn_sections)
746 789 return keep
747 790
748 791 def _filter_general_settings(self, settings):
749 792 keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS]
750 793 return {
751 794 k: settings[k]
752 795 for k in settings if k in keys}
753 796
754 797 def _collect_all_settings(self, global_=False):
755 798 settings = self.global_settings if global_ else self.repo_settings
756 799 result = {}
757 800
758 801 for section, key in self._ui_settings:
759 802 ui = settings.get_ui_by_section_and_key(section, key)
760 803 result_key = self._get_form_ui_key(section, key)
761 804
762 805 if ui:
763 806 if section in ('hooks', 'extensions'):
764 807 result[result_key] = ui.ui_active
765 808 elif result_key in ['vcs_git_lfs_enabled']:
766 809 result[result_key] = ui.ui_active
767 810 else:
768 811 result[result_key] = ui.ui_value
769 812
770 813 for name in self.GENERAL_SETTINGS:
771 814 setting = settings.get_setting_by_name(name)
772 815 if setting:
773 816 result_key = 'rhodecode_{}'.format(name)
774 817 result[result_key] = setting.app_settings_value
775 818
776 819 return result
777 820
778 821 def _get_form_ui_key(self, section, key):
779 822 return '{section}_{key}'.format(
780 823 section=section, key=key.replace('.', '_'))
781 824
782 825 def _create_or_update_ui(
783 826 self, settings, section, key, value=None, active=None):
784 827 ui = settings.get_ui_by_section_and_key(section, key)
785 828 if not ui:
786 829 active = True if active is None else active
787 830 settings.create_ui_section_value(
788 831 section, value, key=key, active=active)
789 832 else:
790 833 if active is not None:
791 834 ui.ui_active = active
792 835 if value is not None:
793 836 ui.ui_value = value
794 837 Session().add(ui)
795 838
796 839 def _create_svn_settings(self, settings, data):
797 840 svn_settings = {
798 841 'new_svn_branch': self.SVN_BRANCH_SECTION,
799 842 'new_svn_tag': self.SVN_TAG_SECTION
800 843 }
801 844 for key in svn_settings:
802 845 if data.get(key):
803 846 settings.create_ui_section_value(svn_settings[key], data[key])
804 847
805 848 def _create_or_update_general_settings(self, settings, data):
806 849 for name in self.GENERAL_SETTINGS:
807 850 data_key = 'rhodecode_{}'.format(name)
808 851 if data_key not in data:
809 852 raise ValueError(
810 853 'The given data does not contain {} key'.format(data_key))
811 854 setting = settings.create_or_update_setting(
812 855 name, data[data_key], 'bool')
813 856 Session().add(setting)
814 857
815 858 def _get_settings_keys(self, settings, data):
816 859 data_keys = [self._get_form_ui_key(*s) for s in settings]
817 860 for data_key in data_keys:
818 861 if data_key not in data:
819 862 raise ValueError(
820 863 'The given data does not contain {} key'.format(data_key))
821 864 return data_keys
822 865
823 866 def create_largeobjects_dirs_if_needed(self, repo_store_path):
824 867 """
825 868 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
826 869 does a repository scan if enabled in the settings.
827 870 """
828 871
829 872 from rhodecode.lib.vcs.backends.hg import largefiles_store
830 873 from rhodecode.lib.vcs.backends.git import lfs_store
831 874
832 875 paths = [
833 876 largefiles_store(repo_store_path),
834 877 lfs_store(repo_store_path)]
835 878
836 879 for path in paths:
837 880 if os.path.isdir(path):
838 881 continue
839 882 if os.path.isfile(path):
840 883 continue
841 884 # not a file nor dir, we try to create it
842 885 try:
843 886 os.makedirs(path)
844 887 except Exception:
845 888 log.warning('Failed to create largefiles dir:%s', path)
@@ -1,101 +1,116 b''
1 1 ## -*- coding: utf-8 -*-
2 2 <%inherit file="/base/base.mako"/>
3 3
4 4 <%def name="title()">
5 5 ${_('Repository groups administration')}
6 6 %if c.rhodecode_name:
7 7 &middot; ${h.branding(c.rhodecode_name)}
8 8 %endif
9 9 </%def>
10 10
11 11 <%def name="breadcrumbs_links()"></%def>
12 12
13 13 <%def name="menu_bar_nav()">
14 14 ${self.menu_items(active='admin')}
15 15 </%def>
16 16
17 17 <%def name="menu_bar_subnav()">
18 18 ${self.admin_menu(active='repository_groups')}
19 19 </%def>
20 20
21 21 <%def name="main()">
22 22 <div class="box">
23 23
24 24 <div class="title">
25 25 <input class="q_filter_box" id="q_filter" size="15" type="text" name="filter" placeholder="${_('quick filter...')}" value=""/>
26 26 <span id="repo_group_count">0</span> ${_('repository groups')}
27 27
28 28 <ul class="links">
29 29 %if c.can_create_repo_group:
30 30 <li>
31 31 <a href="${h.route_path('repo_group_new')}" class="btn btn-small btn-success">${_(u'Add Repository Group')}</a>
32 32 </li>
33 33 %endif
34 34 </ul>
35 35 </div>
36 36 <div id="repos_list_wrap">
37 37 <table id="group_list_table" class="display"></table>
38 38 </div>
39 39 </div>
40 40
41 41 <script>
42 42 $(document).ready(function() {
43
44 var get_datatable_count = function(){
45 var api = $('#group_list_table').dataTable().api();
46 $('#repo_group_count').text(api.page.info().recordsDisplay);
47 };
43 var $repoGroupsListTable = $('#group_list_table');
48 44
49 45 // repo group list
50 $('#group_list_table').DataTable({
51 data: ${c.data|n},
46 $repoGroupsListTable.DataTable({
47 processing: true,
48 serverSide: true,
49 ajax: {
50 "url": "${h.route_path('repo_groups_data')}",
51 "dataSrc": function (json) {
52 var filteredCount = json.recordsFiltered;
53 var filteredInactiveCount = json.recordsFilteredInactive;
54 var totalInactive = json.recordsTotalInactive;
55 var total = json.recordsTotal;
56
57 var _text = _gettext(
58 "{0} of {1} repository groups").format(
59 filteredCount, total);
60
61 if (total === filteredCount) {
62 _text = _gettext("{0} repository groups").format(total);
63 }
64 $('#repo_group_count').text(_text);
65 return json.data;
66 },
67 },
68
52 69 dom: 'rtp',
53 70 pageLength: ${c.visual.admin_grid_items},
54 71 order: [[ 0, "asc" ]],
55 72 columns: [
56 73 { data: {"_": "name",
57 74 "sort": "name_raw"}, title: "${_('Name')}", className: "td-componentname" },
58 75 { data: 'menu', "bSortable": false, className: "quick_repo_menu" },
59 76 { data: {"_": "desc",
60 77 "sort": "desc"}, title: "${_('Description')}", className: "td-description" },
61 78 { data: {"_": "last_change",
62 79 "sort": "last_change_raw",
63 80 "type": Number}, title: "${_('Last Change')}", className: "td-time" },
64 81 { data: {"_": "top_level_repos",
65 82 "sort": "top_level_repos"}, title: "${_('Number of top level repositories')}" },
66 83 { data: {"_": "owner",
67 84 "sort": "owner"}, title: "${_('Owner')}", className: "td-user" },
68 85 { data: {"_": "action",
69 "sort": "action"}, title: "${_('Action')}", className: "td-action" }
86 "sort": "action"}, title: "${_('Action')}", className: "td-action", orderable: false }
70 87 ],
71 88 language: {
72 89 paginate: DEFAULT_GRID_PAGINATION,
90 sProcessing: _gettext('loading...'),
73 91 emptyTable: _gettext("No repository groups available yet.")
74 92 },
75 "initComplete": function( settings, json ) {
76 get_datatable_count();
77 quick_repo_menu();
78 }
79 93 });
80 94
81 // update the counter when doing search
82 $('#group_list_table').on( 'search.dt', function (e,settings) {
83 get_datatable_count();
95 $repoGroupsListTable.on('xhr.dt', function(e, settings, json, xhr){
96 $repoGroupsListTable.css('opacity', 1);
97 });
98
99 $repoGroupsListTable.on('preXhr.dt', function(e, settings, data){
100 $repoGroupsListTable.css('opacity', 0.3);
84 101 });
85 102
86 // filter, filter both grids
87 $('#q_filter').on( 'keyup', function () {
88
89 var repo_group_api = $('#group_list_table').dataTable().api();
90 repo_group_api
91 .columns(0)
92 .search(this.value)
93 .draw();
103 // filter
104 $('#q_filter').on('keyup',
105 $.debounce(250, function() {
106 $repoGroupsListTable.DataTable().search(
107 $('#q_filter').val()
108 ).draw();
109 })
110 );
94 111 });
95 112
96 // refilter table if page load via back button
97 $("#q_filter").trigger('keyup');
98 });
99 113 </script>
114
100 115 </%def>
101 116
@@ -1,105 +1,105 b''
1 1 ## -*- coding: utf-8 -*-
2 2 <%inherit file="/base/base.mako"/>
3 3
4 4 <%def name="title()">
5 5 ${_('Repositories administration')}
6 6 %if c.rhodecode_name:
7 7 &middot; ${h.branding(c.rhodecode_name)}
8 8 %endif
9 9 </%def>
10 10
11 11 <%def name="breadcrumbs_links()"></%def>
12 12
13 13 <%def name="menu_bar_nav()">
14 14 ${self.menu_items(active='admin')}
15 15 </%def>
16 16
17 17 <%def name="menu_bar_subnav()">
18 18 ${self.admin_menu(active='repositories')}
19 19 </%def>
20 20
21 21 <%def name="main()">
22 22 <div class="box">
23 23
24 24 <div class="title">
25 25 <input class="q_filter_box" id="q_filter" size="15" type="text" name="filter" placeholder="${_('quick filter...')}" value=""/>
26 26 <span id="repo_count">0</span> ${_('repositories')}
27 27
28 28 <ul class="links">
29 29 %if c.can_create_repo:
30 30 <li>
31 31 <a href="${h.route_path('repo_new')}" class="btn btn-small btn-success">${_(u'Add Repository')}</a>
32 32 </li>
33 33 %endif
34 34 </ul>
35 35 </div>
36 36 <div id="repos_list_wrap">
37 37 <table id="repo_list_table" class="display"></table>
38 38 </div>
39 39 </div>
40 40
41 41 <script>
42 42 $(document).ready(function() {
43 43
44 44 var get_datatable_count = function(){
45 45 var api = $('#repo_list_table').dataTable().api();
46 46 $('#repo_count').text(api.page.info().recordsDisplay);
47 47 };
48 48
49 49
50 50 // repo list
51 51 $('#repo_list_table').DataTable({
52 52 data: ${c.data|n},
53 53 dom: 'rtp',
54 54 pageLength: ${c.visual.admin_grid_items},
55 55 order: [[ 0, "asc" ]],
56 56 columns: [
57 57 { data: {"_": "name",
58 58 "sort": "name_raw"}, title: "${_('Name')}", className: "td-componentname" },
59 59 { data: 'menu', "bSortable": false, className: "quick_repo_menu" },
60 60 { data: {"_": "desc",
61 61 "sort": "desc"}, title: "${_('Description')}", className: "td-description" },
62 62 { data: {"_": "last_change",
63 63 "sort": "last_change_raw",
64 64 "type": Number}, title: "${_('Last Change')}", className: "td-time" },
65 65 { data: {"_": "last_changeset",
66 66 "sort": "last_changeset_raw",
67 67 "type": Number}, title: "${_('Commit')}", className: "td-commit" },
68 68 { data: {"_": "owner",
69 69 "sort": "owner"}, title: "${_('Owner')}", className: "td-user" },
70 70 { data: {"_": "state",
71 71 "sort": "state"}, title: "${_('State')}", className: "td-tags td-state" },
72 72 { data: {"_": "action",
73 "sort": "action"}, title: "${_('Action')}", className: "td-action" }
73 "sort": "action"}, title: "${_('Action')}", className: "td-action", orderable: false }
74 74 ],
75 75 language: {
76 76 paginate: DEFAULT_GRID_PAGINATION,
77 77 emptyTable:_gettext("No repositories available yet.")
78 78 },
79 79 "initComplete": function( settings, json ) {
80 80 get_datatable_count();
81 81 quick_repo_menu();
82 82 }
83 83 });
84 84
85 85 // update the counter when doing search
86 86 $('#repo_list_table').on( 'search.dt', function (e,settings) {
87 87 get_datatable_count();
88 88 });
89 89
90 90 // filter, filter both grids
91 91 $('#q_filter').on( 'keyup', function () {
92 92 var repo_api = $('#repo_list_table').dataTable().api();
93 93 repo_api
94 94 .columns(0)
95 95 .search(this.value)
96 96 .draw();
97 97 });
98 98
99 99 // refilter table if page load via back button
100 100 $("#q_filter").trigger('keyup');
101 101 });
102 102
103 103 </script>
104 104
105 105 </%def>
@@ -1,118 +1,118 b''
1 1 ## -*- coding: utf-8 -*-
2 2 <%inherit file="/base/base.mako"/>
3 3
4 4 <%def name="title()">
5 5 ${_('User groups administration')}
6 6 %if c.rhodecode_name:
7 7 &middot; ${h.branding(c.rhodecode_name)}
8 8 %endif
9 9 </%def>
10 10
11 11 <%def name="breadcrumbs_links()"></%def>
12 12
13 13 <%def name="menu_bar_nav()">
14 14 ${self.menu_items(active='admin')}
15 15 </%def>
16 16
17 17 <%def name="menu_bar_subnav()">
18 18 ${self.admin_menu(active='user_groups')}
19 19 </%def>
20 20
21 21 <%def name="main()">
22 22 <div class="box">
23 23
24 24 <div class="title">
25 25 <input class="q_filter_box" id="q_filter" size="15" type="text" name="filter" placeholder="${_('quick filter...')}" value=""/>
26 26 <span id="user_group_count">0</span>
27 27
28 28 <ul class="links">
29 29 %if c.can_create_user_group:
30 30 <li>
31 31 <a href="${h.route_path('user_groups_new')}" class="btn btn-small btn-success">${_(u'Add User Group')}</a>
32 32 </li>
33 33 %endif
34 34 </ul>
35 35 </div>
36 36
37 37 <div id="repos_list_wrap">
38 38 <table id="user_group_list_table" class="display"></table>
39 39 </div>
40 40
41 41 </div>
42 42 <script>
43 43 $(document).ready(function() {
44 44 var $userGroupsListTable = $('#user_group_list_table');
45 45
46 46 // user list
47 47 $userGroupsListTable.DataTable({
48 48 processing: true,
49 49 serverSide: true,
50 50 ajax: {
51 51 "url": "${h.route_path('user_groups_data')}",
52 52 "dataSrc": function (json) {
53 53 var filteredCount = json.recordsFiltered;
54 54 var filteredInactiveCount = json.recordsFilteredInactive;
55 55 var totalInactive = json.recordsTotalInactive;
56 56 var total = json.recordsTotal;
57 57
58 58 var _text = _gettext(
59 59 "{0} ({1} inactive) of {2} user groups ({3} inactive)").format(
60 60 filteredCount, filteredInactiveCount, total, totalInactive);
61 61
62 62 if (total === filteredCount) {
63 63 _text = _gettext(
64 64 "{0} user groups ({1} inactive)").format(total, totalInactive);
65 65 }
66 66 $('#user_group_count').text(_text);
67 67 return json.data;
68 68 },
69 69 },
70 70
71 71 dom: 'rtp',
72 72 pageLength: ${c.visual.admin_grid_items},
73 73 order: [[ 0, "asc" ]],
74 74 columns: [
75 75 { data: {"_": "users_group_name",
76 76 "sort": "users_group_name"}, title: "${_('Name')}", className: "td-componentname" },
77 77 { data: {"_": "description",
78 78 "sort": "description"}, title: "${_('Description')}", className: "td-description" },
79 79 { data: {"_": "members",
80 80 "sort": "members"}, title: "${_('Members')}", className: "td-number" },
81 81 { data: {"_": "sync",
82 82 "sort": "sync"}, title: "${_('Sync')}", className: "td-sync" },
83 83 { data: {"_": "active",
84 84 "sort": "active"}, title: "${_('Active')}", className: "td-active" },
85 85 { data: {"_": "owner",
86 86 "sort": "owner"}, title: "${_('Owner')}", className: "td-user" },
87 87 { data: {"_": "action",
88 88 "sort": "action"}, title: "${_('Action')}", className: "td-action", orderable: false}
89 89 ],
90 90 language: {
91 91 paginate: DEFAULT_GRID_PAGINATION,
92 92 sProcessing: _gettext('loading...'),
93 93 emptyTable: _gettext("No user groups available yet.")
94 94 }
95 95 });
96 96
97 97 $userGroupsListTable.on('xhr.dt', function(e, settings, json, xhr){
98 98 $userGroupsListTable.css('opacity', 1);
99 99 });
100 100
101 101 $userGroupsListTable.on('preXhr.dt', function(e, settings, data){
102 102 $userGroupsListTable.css('opacity', 0.3);
103 103 });
104 104
105 105 // filter
106 106 $('#q_filter').on('keyup',
107 107 $.debounce(250, function() {
108 $('#user_group_list_table').DataTable().search(
108 $userGroupsListTable.DataTable().search(
109 109 $('#q_filter').val()
110 110 ).draw();
111 111 })
112 112 );
113 113
114 114 });
115 115
116 116 </script>
117 117
118 118 </%def>
@@ -1,384 +1,384 b''
1 1 ## snippet for displaying vcs settings
2 2 ## usage:
3 3 ## <%namespace name="vcss" file="/base/vcssettings.mako"/>
4 4 ## ${vcss.vcs_settings_fields()}
5 5
6 6 <%def name="vcs_settings_fields(suffix='', svn_branch_patterns=None, svn_tag_patterns=None, repo_type=None, display_globals=False, allow_repo_location_change=False, **kwargs)">
7 7 % if display_globals:
8 8 <div class="panel panel-default">
9 9 <div class="panel-heading" id="general">
10 10 <h3 class="panel-title">${_('General')}<a class="permalink" href="#general"> ¶</a></h3>
11 11 </div>
12 12 <div class="panel-body">
13 13 <div class="field">
14 14 <div class="checkbox">
15 15 ${h.checkbox('web_push_ssl' + suffix, 'True')}
16 16 <label for="web_push_ssl${suffix}">${_('Require SSL for vcs operations')}</label>
17 17 </div>
18 18 <div class="label">
19 19 <span class="help-block">${_('Activate to set RhodeCode to require SSL for pushing or pulling. If SSL certificate is missing it will return a HTTP Error 406: Not Acceptable.')}</span>
20 20 </div>
21 21 </div>
22 22 </div>
23 23 </div>
24 24 % endif
25 25
26 26 % if display_globals:
27 27 <div class="panel panel-default">
28 28 <div class="panel-heading" id="vcs-storage-options">
29 29 <h3 class="panel-title">${_('Main Storage Location')}<a class="permalink" href="#vcs-storage-options"> ¶</a></h3>
30 30 </div>
31 31 <div class="panel-body">
32 32 <div class="field">
33 33 <div class="inputx locked_input">
34 34 %if allow_repo_location_change:
35 35 ${h.text('paths_root_path',size=59,readonly="readonly", class_="disabled")}
36 36 <span id="path_unlock" class="tooltip"
37 37 title="${h.tooltip(_('Click to unlock. You must restart RhodeCode in order to make this setting take effect.'))}">
38 38 <div class="btn btn-default lock_input_button"><i id="path_unlock_icon" class="icon-lock"></i></div>
39 39 </span>
40 40 %else:
41 41 ${_('Repository location change is disabled. You can enable this by changing the `allow_repo_location_change` inside .ini file.')}
42 42 ## form still requires this but we cannot internally change it anyway
43 43 ${h.hidden('paths_root_path',size=30,readonly="readonly", class_="disabled")}
44 44 %endif
45 45 </div>
46 46 </div>
47 47 <div class="label">
48 48 <span class="help-block">${_('Filesystem location where repositories should be stored. After changing this value a restart and rescan of the repository folder are required.')}</span>
49 49 </div>
50 50 </div>
51 51 </div>
52 52 % endif
53 53
54 54 % if display_globals or repo_type in ['git', 'hg']:
55 55 <div class="panel panel-default">
56 56 <div class="panel-heading" id="vcs-hooks-options">
57 57 <h3 class="panel-title">${_('Internal Hooks')}<a class="permalink" href="#vcs-hooks-options"> ¶</a></h3>
58 58 </div>
59 59 <div class="panel-body">
60 60 <div class="field">
61 61 <div class="checkbox">
62 62 ${h.checkbox('hooks_changegroup_repo_size' + suffix, 'True', **kwargs)}
63 63 <label for="hooks_changegroup_repo_size${suffix}">${_('Show repository size after push')}</label>
64 64 </div>
65 65
66 66 <div class="label">
67 67 <span class="help-block">${_('Trigger a hook that calculates repository size after each push.')}</span>
68 68 </div>
69 69 <div class="checkbox">
70 70 ${h.checkbox('hooks_changegroup_push_logger' + suffix, 'True', **kwargs)}
71 71 <label for="hooks_changegroup_push_logger${suffix}">${_('Execute pre/post push hooks')}</label>
72 72 </div>
73 73 <div class="label">
74 74 <span class="help-block">${_('Execute Built in pre/post push hooks. This also executes rcextensions hooks.')}</span>
75 75 </div>
76 76 <div class="checkbox">
77 77 ${h.checkbox('hooks_outgoing_pull_logger' + suffix, 'True', **kwargs)}
78 78 <label for="hooks_outgoing_pull_logger${suffix}">${_('Execute pre/post pull hooks')}</label>
79 79 </div>
80 80 <div class="label">
81 81 <span class="help-block">${_('Execute Built in pre/post pull hooks. This also executes rcextensions hooks.')}</span>
82 82 </div>
83 83 </div>
84 84 </div>
85 85 </div>
86 86 % endif
87 87
88 88 % if display_globals or repo_type in ['hg']:
89 89 <div class="panel panel-default">
90 90 <div class="panel-heading" id="vcs-hg-options">
91 91 <h3 class="panel-title">${_('Mercurial Settings')}<a class="permalink" href="#vcs-hg-options"> ¶</a></h3>
92 92 </div>
93 93 <div class="panel-body">
94 94 <div class="checkbox">
95 95 ${h.checkbox('extensions_largefiles' + suffix, 'True', **kwargs)}
96 96 <label for="extensions_largefiles${suffix}">${_('Enable largefiles extension')}</label>
97 97 </div>
98 98 <div class="label">
99 99 % if display_globals:
100 100 <span class="help-block">${_('Enable Largefiles extensions for all repositories.')}</span>
101 101 % else:
102 102 <span class="help-block">${_('Enable Largefiles extensions for this repository.')}</span>
103 103 % endif
104 104 </div>
105 105
106 106 % if display_globals:
107 107 <div class="field">
108 108 <div class="input">
109 109 ${h.text('largefiles_usercache' + suffix, size=59)}
110 110 </div>
111 111 </div>
112 112 <div class="label">
113 113 <span class="help-block">${_('Filesystem location where Mercurial largefile objects should be stored.')}</span>
114 114 </div>
115 115 % endif
116 116
117 117 <div class="checkbox">
118 118 ${h.checkbox('phases_publish' + suffix, 'True', **kwargs)}
119 119 <label for="phases_publish${suffix}">${_('Set repositories as publishing') if display_globals else _('Set repository as publishing')}</label>
120 120 </div>
121 121 <div class="label">
122 122 <span class="help-block">${_('When this is enabled all commits in the repository are seen as public commits by clients.')}</span>
123 123 </div>
124 124 % if display_globals:
125 125 <div class="checkbox">
126 126 ${h.checkbox('extensions_hgsubversion' + suffix,'True')}
127 127 <label for="extensions_hgsubversion${suffix}">${_('Enable hgsubversion extension')}</label>
128 128 </div>
129 129 <div class="label">
130 130 <span class="help-block">${_('Requires hgsubversion library to be installed. Allows cloning remote SVN repositories and migrates them to Mercurial type.')}</span>
131 131 </div>
132 132 % endif
133 133
134 134 <div class="checkbox">
135 135 ${h.checkbox('extensions_evolve' + suffix, 'True', **kwargs)}
136 <label for="extensions_evolve${suffix}">${_('Enable evolve extension')}</label>
136 <label for="extensions_evolve${suffix}">${_('Enable Evolve and Topic extension')}</label>
137 137 </div>
138 138 <div class="label">
139 139 % if display_globals:
140 <span class="help-block">${_('Enable evolve extension for all repositories.')}</span>
140 <span class="help-block">${_('Enable Evolve and Topic extensions for all repositories.')}</span>
141 141 % else:
142 <span class="help-block">${_('Enable evolve extension for this repository.')}</span>
142 <span class="help-block">${_('Enable Evolve and Topic extensions for this repository.')}</span>
143 143 % endif
144 144 </div>
145 145
146 146 </div>
147 147 </div>
148 148 % endif
149 149
150 150 % if display_globals or repo_type in ['git']:
151 151 <div class="panel panel-default">
152 152 <div class="panel-heading" id="vcs-git-options">
153 153 <h3 class="panel-title">${_('Git Settings')}<a class="permalink" href="#vcs-git-options"> ¶</a></h3>
154 154 </div>
155 155 <div class="panel-body">
156 156 <div class="checkbox">
157 157 ${h.checkbox('vcs_git_lfs_enabled' + suffix, 'True', **kwargs)}
158 158 <label for="vcs_git_lfs_enabled${suffix}">${_('Enable lfs extension')}</label>
159 159 </div>
160 160 <div class="label">
161 161 % if display_globals:
162 162 <span class="help-block">${_('Enable lfs extensions for all repositories.')}</span>
163 163 % else:
164 164 <span class="help-block">${_('Enable lfs extensions for this repository.')}</span>
165 165 % endif
166 166 </div>
167 167
168 168 % if display_globals:
169 169 <div class="field">
170 170 <div class="input">
171 171 ${h.text('vcs_git_lfs_store_location' + suffix, size=59)}
172 172 </div>
173 173 </div>
174 174 <div class="label">
175 175 <span class="help-block">${_('Filesystem location where Git lfs objects should be stored.')}</span>
176 176 </div>
177 177 % endif
178 178 </div>
179 179 </div>
180 180 % endif
181 181
182 182
183 183 % if display_globals:
184 184 <div class="panel panel-default">
185 185 <div class="panel-heading" id="vcs-global-svn-options">
186 186 <h3 class="panel-title">${_('Global Subversion Settings')}<a class="permalink" href="#vcs-global-svn-options"> ¶</a></h3>
187 187 </div>
188 188 <div class="panel-body">
189 189 <div class="field">
190 190 <div class="checkbox">
191 191 ${h.checkbox('vcs_svn_proxy_http_requests_enabled' + suffix, 'True', **kwargs)}
192 192 <label for="vcs_svn_proxy_http_requests_enabled${suffix}">${_('Proxy subversion HTTP requests')}</label>
193 193 </div>
194 194 <div class="label">
195 195 <span class="help-block">
196 196 ${_('Subversion HTTP Support. Enables communication with SVN over HTTP protocol.')}
197 197 <a href="${h.route_url('enterprise_svn_setup')}" target="_blank">${_('SVN Protocol setup Documentation')}</a>.
198 198 </span>
199 199 </div>
200 200 </div>
201 201 <div class="field">
202 202 <div class="label">
203 203 <label for="vcs_svn_proxy_http_server_url">${_('Subversion HTTP Server URL')}</label><br/>
204 204 </div>
205 205 <div class="input">
206 206 ${h.text('vcs_svn_proxy_http_server_url',size=59)}
207 207 % if c.svn_proxy_generate_config:
208 208 <span class="buttons">
209 209 <button class="btn btn-primary" id="vcs_svn_generate_cfg">${_('Generate Apache Config')}</button>
210 210 </span>
211 211 % endif
212 212 </div>
213 213 </div>
214 214 </div>
215 215 </div>
216 216 % endif
217 217
218 218 % if display_globals or repo_type in ['svn']:
219 219 <div class="panel panel-default">
220 220 <div class="panel-heading" id="vcs-svn-options">
221 221 <h3 class="panel-title">${_('Subversion Settings')}<a class="permalink" href="#vcs-svn-options"> ¶</a></h3>
222 222 </div>
223 223 <div class="panel-body">
224 224 <div class="field">
225 225 <div class="content" >
226 226 <label>${_('Repository patterns')}</label><br/>
227 227 </div>
228 228 </div>
229 229 <div class="label">
230 230 <span class="help-block">${_('Patterns for identifying SVN branches and tags. For recursive search, use "*". Eg.: "/branches/*"')}</span>
231 231 </div>
232 232
233 233 <div class="field branch_patterns">
234 234 <div class="input" >
235 235 <label>${_('Branches')}:</label><br/>
236 236 </div>
237 237 % if svn_branch_patterns:
238 238 % for branch in svn_branch_patterns:
239 239 <div class="input adjacent" id="${'id%s' % branch.ui_id}">
240 240 ${h.hidden('branch_ui_key' + suffix, branch.ui_key)}
241 241 ${h.text('branch_value_%d' % branch.ui_id + suffix, branch.ui_value, size=59, readonly="readonly", class_='disabled')}
242 242 % if kwargs.get('disabled') != 'disabled':
243 243 <span class="btn btn-x" onclick="ajaxDeletePattern(${branch.ui_id},'${'id%s' % branch.ui_id}')">
244 244 ${_('Delete')}
245 245 </span>
246 246 % endif
247 247 </div>
248 248 % endfor
249 249 %endif
250 250 </div>
251 251 % if kwargs.get('disabled') != 'disabled':
252 252 <div class="field branch_patterns">
253 253 <div class="input" >
254 254 ${h.text('new_svn_branch',size=59,placeholder='New branch pattern')}
255 255 </div>
256 256 </div>
257 257 % endif
258 258 <div class="field tag_patterns">
259 259 <div class="input" >
260 260 <label>${_('Tags')}:</label><br/>
261 261 </div>
262 262 % if svn_tag_patterns:
263 263 % for tag in svn_tag_patterns:
264 264 <div class="input" id="${'id%s' % tag.ui_id + suffix}">
265 265 ${h.hidden('tag_ui_key' + suffix, tag.ui_key)}
266 266 ${h.text('tag_ui_value_new_%d' % tag.ui_id + suffix, tag.ui_value, size=59, readonly="readonly", class_='disabled tag_input')}
267 267 % if kwargs.get('disabled') != 'disabled':
268 268 <span class="btn btn-x" onclick="ajaxDeletePattern(${tag.ui_id},'${'id%s' % tag.ui_id}')">
269 269 ${_('Delete')}
270 270 </span>
271 271 %endif
272 272 </div>
273 273 % endfor
274 274 % endif
275 275 </div>
276 276 % if kwargs.get('disabled') != 'disabled':
277 277 <div class="field tag_patterns">
278 278 <div class="input" >
279 279 ${h.text('new_svn_tag' + suffix, size=59, placeholder='New tag pattern')}
280 280 </div>
281 281 </div>
282 282 %endif
283 283 </div>
284 284 </div>
285 285 % else:
286 286 ${h.hidden('new_svn_branch' + suffix, '')}
287 287 ${h.hidden('new_svn_tag' + suffix, '')}
288 288 % endif
289 289
290 290
291 291 % if display_globals or repo_type in ['hg', 'git']:
292 292 <div class="panel panel-default">
293 293 <div class="panel-heading" id="vcs-pull-requests-options">
294 294 <h3 class="panel-title">${_('Pull Request Settings')}<a class="permalink" href="#vcs-pull-requests-options"> ¶</a></h3>
295 295 </div>
296 296 <div class="panel-body">
297 297 <div class="checkbox">
298 298 ${h.checkbox('rhodecode_pr_merge_enabled' + suffix, 'True', **kwargs)}
299 299 <label for="rhodecode_pr_merge_enabled${suffix}">${_('Enable server-side merge for pull requests')}</label>
300 300 </div>
301 301 <div class="label">
302 302 <span class="help-block">${_('Note: when this feature is enabled, it only runs hooks defined in the rcextension package. Custom hooks added on the Admin -> Settings -> Hooks page will not be run when pull requests are automatically merged from the web interface.')}</span>
303 303 </div>
304 304 <div class="checkbox">
305 305 ${h.checkbox('rhodecode_use_outdated_comments' + suffix, 'True', **kwargs)}
306 306 <label for="rhodecode_use_outdated_comments${suffix}">${_('Invalidate and relocate inline comments during update')}</label>
307 307 </div>
308 308 <div class="label">
309 309 <span class="help-block">${_('During the update of a pull request, the position of inline comments will be updated and outdated inline comments will be hidden.')}</span>
310 310 </div>
311 311 </div>
312 312 </div>
313 313 % endif
314 314
315 315 % if display_globals or repo_type in ['hg', 'git', 'svn']:
316 316 <div class="panel panel-default">
317 317 <div class="panel-heading" id="vcs-pull-requests-options">
318 318 <h3 class="panel-title">${_('Diff cache')}<a class="permalink" href="#vcs-pull-requests-options"> ¶</a></h3>
319 319 </div>
320 320 <div class="panel-body">
321 321 <div class="checkbox">
322 322 ${h.checkbox('rhodecode_diff_cache' + suffix, 'True', **kwargs)}
323 323 <label for="rhodecode_diff_cache${suffix}">${_('Enable caching diffs for pull requests cache and commits')}</label>
324 324 </div>
325 325 </div>
326 326 </div>
327 327 % endif
328 328
329 329 % if display_globals or repo_type in ['hg',]:
330 330 <div class="panel panel-default">
331 331 <div class="panel-heading" id="vcs-pull-requests-options">
332 332 <h3 class="panel-title">${_('Mercurial Pull Request Settings')}<a class="permalink" href="#vcs-hg-pull-requests-options"> ¶</a></h3>
333 333 </div>
334 334 <div class="panel-body">
335 335 ## Specific HG settings
336 336 <div class="checkbox">
337 337 ${h.checkbox('rhodecode_hg_use_rebase_for_merging' + suffix, 'True', **kwargs)}
338 338 <label for="rhodecode_hg_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
339 339 </div>
340 340 <div class="label">
341 341 <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
342 342 </div>
343 343
344 344 <div class="checkbox">
345 345 ${h.checkbox('rhodecode_hg_close_branch_before_merging' + suffix, 'True', **kwargs)}
346 346 <label for="rhodecode_hg_close_branch_before_merging{suffix}">${_('Close branch before merging it')}</label>
347 347 </div>
348 348 <div class="label">
349 349 <span class="help-block">${_('Close branch before merging it into destination branch. No effect when rebase strategy is use.')}</span>
350 350 </div>
351 351
352 352
353 353 </div>
354 354 </div>
355 355 % endif
356 356
357 357 ## DISABLED FOR GIT FOR NOW as the rebase/close is not supported yet
358 358 ## % if display_globals or repo_type in ['git']:
359 359 ## <div class="panel panel-default">
360 360 ## <div class="panel-heading" id="vcs-pull-requests-options">
361 361 ## <h3 class="panel-title">${_('Git Pull Request Settings')}<a class="permalink" href="#vcs-git-pull-requests-options"> ¶</a></h3>
362 362 ## </div>
363 363 ## <div class="panel-body">
364 364 ## <div class="checkbox">
365 365 ## ${h.checkbox('rhodecode_git_use_rebase_for_merging' + suffix, 'True', **kwargs)}
366 366 ## <label for="rhodecode_git_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
367 367 ## </div>
368 368 ## <div class="label">
369 369 ## <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
370 370 ## </div>
371 371 ##
372 372 ## <div class="checkbox">
373 373 ## ${h.checkbox('rhodecode_git_close_branch_before_merging' + suffix, 'True', **kwargs)}
374 374 ## <label for="rhodecode_git_close_branch_before_merging{suffix}">${_('Delete branch after merging it')}</label>
375 375 ## </div>
376 376 ## <div class="label">
377 377 ## <span class="help-block">${_('Delete branch after merging it into destination branch. No effect when rebase strategy is use.')}</span>
378 378 ## </div>
379 379 ## </div>
380 380 ## </div>
381 381 ## % endif
382 382
383 383
384 384 </%def>
@@ -1,147 +1,152 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.tests import TestController
24 24 from rhodecode.tests.fixture import Fixture
25 25
26 26
27 27 def route_path(name, params=None, **kwargs):
28 28 import urllib
29 29 from rhodecode.apps._base import ADMIN_PREFIX
30 30
31 31 base_url = {
32 32 'home': '/',
33 33 'admin_home': ADMIN_PREFIX,
34 34 'repos':
35 35 ADMIN_PREFIX + '/repos',
36 36 'repo_groups':
37 37 ADMIN_PREFIX + '/repo_groups',
38 'repo_groups_data':
39 ADMIN_PREFIX + '/repo_groups_data',
38 40 'user_groups':
39 41 ADMIN_PREFIX + '/user_groups',
40 42 'user_groups_data':
41 43 ADMIN_PREFIX + '/user_groups_data',
42 44 }[name].format(**kwargs)
43 45
44 46 if params:
45 47 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
46 48 return base_url
47 49
48 50
49 51 fixture = Fixture()
50 52
51 53
52 54 class TestAdminDelegatedUser(TestController):
53 55
54 56 def test_regular_user_cannot_see_admin_interfaces(self, user_util, xhr_header):
55 57 user = user_util.create_user(password='qweqwe')
56 58 user_util.inherit_default_user_permissions(user.username, False)
57 59
58 60 self.log_user(user.username, 'qweqwe')
59 61
60 62 # user doesn't have any access to resources so main admin page should 404
61 63 self.app.get(route_path('admin_home'), status=404)
62 64
63 65 response = self.app.get(route_path('repos'), status=200)
64 66 response.mustcontain('data: []')
65 67
66 response = self.app.get(route_path('repo_groups'), status=200)
67 response.mustcontain('data: []')
68 response = self.app.get(route_path('repo_groups_data'),
69 status=200, extra_environ=xhr_header)
70 assert response.json['data'] == []
68 71
69 72 response = self.app.get(route_path('user_groups_data'),
70 73 status=200, extra_environ=xhr_header)
71 74 assert response.json['data'] == []
72 75
73 76 def test_regular_user_can_see_admin_interfaces_if_owner(self, user_util, xhr_header):
74 77 user = user_util.create_user(password='qweqwe')
75 78 username = user.username
76 79
77 80 repo = user_util.create_repo(owner=username)
78 81 repo_name = repo.repo_name
79 82
80 83 repo_group = user_util.create_repo_group(owner=username)
81 84 repo_group_name = repo_group.group_name
82 85
83 86 user_group = user_util.create_user_group(owner=username)
84 87 user_group_name = user_group.users_group_name
85 88
86 89 self.log_user(username, 'qweqwe')
87 90
88 91 response = self.app.get(route_path('admin_home'))
89 92
90 93 assert_response = response.assert_response()
91 94
92 95 assert_response.element_contains('td.delegated-admin-repos', '1')
93 96 assert_response.element_contains('td.delegated-admin-repo-groups', '1')
94 97 assert_response.element_contains('td.delegated-admin-user-groups', '1')
95 98
96 99 # admin interfaces have visible elements
97 100 response = self.app.get(route_path('repos'), status=200)
98 101 response.mustcontain('"name_raw": "{}"'.format(repo_name))
99 102
100 response = self.app.get(route_path('repo_groups'), status=200)
103 response = self.app.get(route_path('repo_groups_data'),
104 extra_environ=xhr_header, status=200)
101 105 response.mustcontain('"name_raw": "{}"'.format(repo_group_name))
102 106
103 107 response = self.app.get(route_path('user_groups_data'),
104 108 extra_environ=xhr_header, status=200)
105 109 response.mustcontain('"name_raw": "{}"'.format(user_group_name))
106 110
107 111 def test_regular_user_can_see_admin_interfaces_if_admin_perm(
108 112 self, user_util, xhr_header):
109 113 user = user_util.create_user(password='qweqwe')
110 114 username = user.username
111 115
112 116 repo = user_util.create_repo()
113 117 repo_name = repo.repo_name
114 118
115 119 repo_group = user_util.create_repo_group()
116 120 repo_group_name = repo_group.group_name
117 121
118 122 user_group = user_util.create_user_group()
119 123 user_group_name = user_group.users_group_name
120 124
121 125 user_util.grant_user_permission_to_repo(
122 126 repo, user, 'repository.admin')
123 127 user_util.grant_user_permission_to_repo_group(
124 128 repo_group, user, 'group.admin')
125 129 user_util.grant_user_permission_to_user_group(
126 130 user_group, user, 'usergroup.admin')
127 131
128 132 self.log_user(username, 'qweqwe')
129 133 # check if in home view, such user doesn't see the "admin" menus
130 134 response = self.app.get(route_path('admin_home'))
131 135
132 136 assert_response = response.assert_response()
133 137
134 138 assert_response.element_contains('td.delegated-admin-repos', '1')
135 139 assert_response.element_contains('td.delegated-admin-repo-groups', '1')
136 140 assert_response.element_contains('td.delegated-admin-user-groups', '1')
137 141
138 142 # admin interfaces have visible elements
139 143 response = self.app.get(route_path('repos'), status=200)
140 144 response.mustcontain('"name_raw": "{}"'.format(repo_name))
141 145
142 response = self.app.get(route_path('repo_groups'), status=200)
146 response = self.app.get(route_path('repo_groups_data'),
147 extra_environ=xhr_header, status=200)
143 148 response.mustcontain('"name_raw": "{}"'.format(repo_group_name))
144 149
145 150 response = self.app.get(route_path('user_groups_data'),
146 151 extra_environ=xhr_header, status=200)
147 152 response.mustcontain('"name_raw": "{}"'.format(user_group_name))
@@ -1,1082 +1,1080 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23
24 24 from rhodecode.lib.utils2 import str2bool
25 25 from rhodecode.model.meta import Session
26 26 from rhodecode.model.settings import VcsSettingsModel, UiSetting
27 27
28 28
29 29 HOOKS_FORM_DATA = {
30 30 'hooks_changegroup_repo_size': True,
31 31 'hooks_changegroup_push_logger': True,
32 32 'hooks_outgoing_pull_logger': True
33 33 }
34 34
35 35 SVN_FORM_DATA = {
36 36 'new_svn_branch': 'test-branch',
37 37 'new_svn_tag': 'test-tag'
38 38 }
39 39
40 40 GENERAL_FORM_DATA = {
41 41 'rhodecode_pr_merge_enabled': True,
42 42 'rhodecode_use_outdated_comments': True,
43 43 'rhodecode_hg_use_rebase_for_merging': True,
44 44 'rhodecode_hg_close_branch_before_merging': True,
45 45 'rhodecode_git_use_rebase_for_merging': True,
46 46 'rhodecode_git_close_branch_before_merging': True,
47 47 'rhodecode_diff_cache': True,
48 48 }
49 49
50 50
51 51 class TestInheritGlobalSettingsProperty(object):
52 52 def test_get_raises_exception_when_repository_not_specified(self):
53 53 model = VcsSettingsModel()
54 54 with pytest.raises(Exception) as exc_info:
55 55 model.inherit_global_settings
56 56 assert str(exc_info.value) == 'Repository is not specified'
57 57
58 58 def test_true_is_returned_when_value_is_not_found(self, repo_stub):
59 59 model = VcsSettingsModel(repo=repo_stub.repo_name)
60 60 assert model.inherit_global_settings is True
61 61
62 62 def test_value_is_returned(self, repo_stub, settings_util):
63 63 model = VcsSettingsModel(repo=repo_stub.repo_name)
64 64 settings_util.create_repo_rhodecode_setting(
65 65 repo_stub, VcsSettingsModel.INHERIT_SETTINGS, False, 'bool')
66 66 assert model.inherit_global_settings is False
67 67
68 68 def test_value_is_set(self, repo_stub):
69 69 model = VcsSettingsModel(repo=repo_stub.repo_name)
70 70 model.inherit_global_settings = False
71 71 setting = model.repo_settings.get_setting_by_name(
72 72 VcsSettingsModel.INHERIT_SETTINGS)
73 73 try:
74 74 assert setting.app_settings_type == 'bool'
75 75 assert setting.app_settings_value is False
76 76 finally:
77 77 Session().delete(setting)
78 78 Session().commit()
79 79
80 80 def test_set_raises_exception_when_repository_not_specified(self):
81 81 model = VcsSettingsModel()
82 82 with pytest.raises(Exception) as exc_info:
83 83 model.inherit_global_settings = False
84 84 assert str(exc_info.value) == 'Repository is not specified'
85 85
86 86
87 87 class TestVcsSettingsModel(object):
88 88 def test_global_svn_branch_patterns(self):
89 89 model = VcsSettingsModel()
90 90 expected_result = {'test': 'test'}
91 91 with mock.patch.object(model, 'global_settings') as settings_mock:
92 92 get_settings = settings_mock.get_ui_by_section
93 93 get_settings.return_value = expected_result
94 94 settings_mock.return_value = expected_result
95 95 result = model.get_global_svn_branch_patterns()
96 96
97 97 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
98 98 assert expected_result == result
99 99
100 100 def test_repo_svn_branch_patterns(self):
101 101 model = VcsSettingsModel()
102 102 expected_result = {'test': 'test'}
103 103 with mock.patch.object(model, 'repo_settings') as settings_mock:
104 104 get_settings = settings_mock.get_ui_by_section
105 105 get_settings.return_value = expected_result
106 106 settings_mock.return_value = expected_result
107 107 result = model.get_repo_svn_branch_patterns()
108 108
109 109 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
110 110 assert expected_result == result
111 111
112 112 def test_repo_svn_branch_patterns_raises_exception_when_repo_is_not_set(
113 113 self):
114 114 model = VcsSettingsModel()
115 115 with pytest.raises(Exception) as exc_info:
116 116 model.get_repo_svn_branch_patterns()
117 117 assert str(exc_info.value) == 'Repository is not specified'
118 118
119 119 def test_global_svn_tag_patterns(self):
120 120 model = VcsSettingsModel()
121 121 expected_result = {'test': 'test'}
122 122 with mock.patch.object(model, 'global_settings') as settings_mock:
123 123 get_settings = settings_mock.get_ui_by_section
124 124 get_settings.return_value = expected_result
125 125 settings_mock.return_value = expected_result
126 126 result = model.get_global_svn_tag_patterns()
127 127
128 128 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
129 129 assert expected_result == result
130 130
131 131 def test_repo_svn_tag_patterns(self):
132 132 model = VcsSettingsModel()
133 133 expected_result = {'test': 'test'}
134 134 with mock.patch.object(model, 'repo_settings') as settings_mock:
135 135 get_settings = settings_mock.get_ui_by_section
136 136 get_settings.return_value = expected_result
137 137 settings_mock.return_value = expected_result
138 138 result = model.get_repo_svn_tag_patterns()
139 139
140 140 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
141 141 assert expected_result == result
142 142
143 143 def test_repo_svn_tag_patterns_raises_exception_when_repo_is_not_set(self):
144 144 model = VcsSettingsModel()
145 145 with pytest.raises(Exception) as exc_info:
146 146 model.get_repo_svn_tag_patterns()
147 147 assert str(exc_info.value) == 'Repository is not specified'
148 148
149 149 def test_get_global_settings(self):
150 150 expected_result = {'test': 'test'}
151 151 model = VcsSettingsModel()
152 152 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
153 153 collect_mock.return_value = expected_result
154 154 result = model.get_global_settings()
155 155
156 156 collect_mock.assert_called_once_with(global_=True)
157 157 assert result == expected_result
158 158
159 159 def test_get_repo_settings(self, repo_stub):
160 160 model = VcsSettingsModel(repo=repo_stub.repo_name)
161 161 expected_result = {'test': 'test'}
162 162 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
163 163 collect_mock.return_value = expected_result
164 164 result = model.get_repo_settings()
165 165
166 166 collect_mock.assert_called_once_with(global_=False)
167 167 assert result == expected_result
168 168
169 169 @pytest.mark.parametrize('settings, global_', [
170 170 ('global_settings', True),
171 171 ('repo_settings', False)
172 172 ])
173 173 def test_collect_all_settings(self, settings, global_):
174 174 model = VcsSettingsModel()
175 175 result_mock = self._mock_result()
176 176
177 177 settings_patch = mock.patch.object(model, settings)
178 178 with settings_patch as settings_mock:
179 179 settings_mock.get_ui_by_section_and_key.return_value = result_mock
180 180 settings_mock.get_setting_by_name.return_value = result_mock
181 181 result = model._collect_all_settings(global_=global_)
182 182
183 183 ui_settings = model.HG_SETTINGS + model.GIT_SETTINGS + model.HOOKS_SETTINGS
184 184 self._assert_get_settings_calls(
185 185 settings_mock, ui_settings, model.GENERAL_SETTINGS)
186 186 self._assert_collect_all_settings_result(
187 187 ui_settings, model.GENERAL_SETTINGS, result)
188 188
189 189 @pytest.mark.parametrize('settings, global_', [
190 190 ('global_settings', True),
191 191 ('repo_settings', False)
192 192 ])
193 193 def test_collect_all_settings_without_empty_value(self, settings, global_):
194 194 model = VcsSettingsModel()
195 195
196 196 settings_patch = mock.patch.object(model, settings)
197 197 with settings_patch as settings_mock:
198 198 settings_mock.get_ui_by_section_and_key.return_value = None
199 199 settings_mock.get_setting_by_name.return_value = None
200 200 result = model._collect_all_settings(global_=global_)
201 201
202 202 assert result == {}
203 203
204 204 def _mock_result(self):
205 205 result_mock = mock.Mock()
206 206 result_mock.ui_value = 'ui_value'
207 207 result_mock.ui_active = True
208 208 result_mock.app_settings_value = 'setting_value'
209 209 return result_mock
210 210
211 211 def _assert_get_settings_calls(
212 212 self, settings_mock, ui_settings, general_settings):
213 213 assert (
214 214 settings_mock.get_ui_by_section_and_key.call_count ==
215 215 len(ui_settings))
216 216 assert (
217 217 settings_mock.get_setting_by_name.call_count ==
218 218 len(general_settings))
219 219
220 220 for section, key in ui_settings:
221 221 expected_call = mock.call(section, key)
222 222 assert (
223 223 expected_call in
224 224 settings_mock.get_ui_by_section_and_key.call_args_list)
225 225
226 226 for name in general_settings:
227 227 expected_call = mock.call(name)
228 228 assert (
229 229 expected_call in
230 230 settings_mock.get_setting_by_name.call_args_list)
231 231
232 232 def _assert_collect_all_settings_result(
233 233 self, ui_settings, general_settings, result):
234 234 expected_result = {}
235 235 for section, key in ui_settings:
236 236 key = '{}_{}'.format(section, key.replace('.', '_'))
237 237
238 238 if section in ('extensions', 'hooks'):
239 239 value = True
240 240 elif key in ['vcs_git_lfs_enabled']:
241 241 value = True
242 242 else:
243 243 value = 'ui_value'
244 244 expected_result[key] = value
245 245
246 246 for name in general_settings:
247 247 key = 'rhodecode_' + name
248 248 expected_result[key] = 'setting_value'
249 249
250 250 assert expected_result == result
251 251
252 252
class TestCreateOrUpdateRepoHookSettings(object):
    """Tests for VcsSettingsModel.create_or_update_repo_hook_settings."""

    def test_create_when_no_repo_object_found(self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)

        self._create_settings(model, HOOKS_FORM_DATA)

        created = []
        try:
            for section, key in model.HOOKS_SETTINGS:
                ui = model.repo_settings.get_ui_by_section_and_key(
                    section, key)
                assert ui.ui_active is True
                created.append(ui)
        finally:
            # Remove whatever was created, even if an assertion failed.
            for ui in created:
                Session().delete(ui)
            Session().commit()

    def test_create_raises_exception_when_data_incomplete(self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)

        missing = 'hooks_changegroup_repo_size'
        data = HOOKS_FORM_DATA.copy()
        data.pop(missing)

        with pytest.raises(ValueError) as exc_info:
            model.create_or_update_repo_hook_settings(data)
        assert str(exc_info.value) == (
            'The given data does not contain {} key'.format(missing))

    def test_update_when_repo_object_found(self, repo_stub, settings_util):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        # Pre-seed inactive hook rows so the call has something to update.
        for section, key in model.HOOKS_SETTINGS:
            settings_util.create_repo_rhodecode_ui(
                repo_stub, section, None, key=key, active=False)
        model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
        for section, key in model.HOOKS_SETTINGS:
            ui = model.repo_settings.get_ui_by_section_and_key(section, key)
            assert ui.ui_active is True

    def _create_settings(self, model, data):
        global_setting = mock.Mock()
        global_setting.ui_value = 'Test value'
        with mock.patch.object(model, 'global_settings') as global_mock:
            global_mock.get_ui_by_section_and_key.return_value = global_setting
            model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
300 300
301 301
class TestUpdateGlobalHookSettings(object):
    """Tests for VcsSettingsModel.update_global_hook_settings."""

    def test_update_raises_exception_when_data_incomplete(self):
        model = VcsSettingsModel()

        missing = 'hooks_changegroup_repo_size'
        data = HOOKS_FORM_DATA.copy()
        data.pop(missing)

        with pytest.raises(ValueError) as exc_info:
            model.update_global_hook_settings(data)
        assert str(exc_info.value) == (
            'The given data does not contain {} key'.format(missing))

    def test_update_global_hook_settings(self, settings_util):
        model = VcsSettingsModel()
        setting_stub = mock.MagicMock()
        setting_stub.ui_active = False
        lookup_patcher = mock.patch.object(
            model.global_settings, 'get_ui_by_section_and_key',
            return_value=setting_stub)
        session_patcher = mock.patch('rhodecode.model.settings.Session')
        with lookup_patcher as lookup_mock, session_patcher:
            model.update_global_hook_settings(HOOKS_FORM_DATA)
        # Each of the three hooks was looked up and switched on.
        assert setting_stub.ui_active is True
        assert lookup_mock.call_count == 3
327 327
328 328
class TestCreateOrUpdateRepoGeneralSettings(object):
    """create_or_update_repo_pr_settings must target the repo settings."""

    def test_calls_create_or_update_general_settings(self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        with mock.patch.object(
                model, '_create_or_update_general_settings') as create_mock:
            model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
        create_mock.assert_called_once_with(
            model.repo_settings, GENERAL_FORM_DATA)

    def test_raises_exception_when_repository_is_not_specified(self):
        model = VcsSettingsModel()
        with pytest.raises(Exception) as exc_info:
            model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
        assert str(exc_info.value) == 'Repository is not specified'
344 344
345 345
class TestCreateOrUpdatGlobalGeneralSettings(object):
    """create_or_update_global_pr_settings must target the global settings."""

    def test_calls_create_or_update_general_settings(self):
        model = VcsSettingsModel()
        with mock.patch.object(
                model, '_create_or_update_general_settings') as create_mock:
            model.create_or_update_global_pr_settings(GENERAL_FORM_DATA)
        create_mock.assert_called_once_with(
            model.global_settings, GENERAL_FORM_DATA)
355 355
356 356
class TestCreateOrUpdateGeneralSettings(object):
    """Tests for VcsSettingsModel._create_or_update_general_settings."""

    def test_create_when_no_repo_settings_found(self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        model._create_or_update_general_settings(
            model.repo_settings, GENERAL_FORM_DATA)

        created = []
        try:
            for name in model.GENERAL_SETTINGS:
                setting = model.repo_settings.get_setting_by_name(name)
                assert setting.app_settings_value is True
                created.append(setting)
        finally:
            # Clean up even when an assertion above fails.
            for setting in created:
                Session().delete(setting)
            Session().commit()

    def test_create_raises_exception_when_data_incomplete(self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)

        missing = 'rhodecode_pr_merge_enabled'
        data = GENERAL_FORM_DATA.copy()
        data.pop(missing)

        with pytest.raises(ValueError) as exc_info:
            model._create_or_update_general_settings(model.repo_settings, data)

        assert str(exc_info.value) == (
            'The given data does not contain {} key'.format(missing))

    def test_update_when_repo_setting_found(self, repo_stub, settings_util):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        # Seed every general setting with a False value first.
        for name in model.GENERAL_SETTINGS:
            settings_util.create_repo_rhodecode_setting(
                repo_stub, name, False, 'bool')

        model._create_or_update_general_settings(
            model.repo_settings, GENERAL_FORM_DATA)

        for name in model.GENERAL_SETTINGS:
            setting = model.repo_settings.get_setting_by_name(name)
            assert setting.app_settings_value is True
399 399
400 400
class TestCreateRepoSvnSettings(object):
    """create_repo_svn_settings requires a repo and delegates to the helper."""

    def test_calls_create_svn_settings(self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        with mock.patch.object(model, '_create_svn_settings') as create_mock:
            model.create_repo_svn_settings(SVN_FORM_DATA)
        create_mock.assert_called_once_with(model.repo_settings, SVN_FORM_DATA)

    def test_raises_exception_when_repository_is_not_specified(self):
        model = VcsSettingsModel()
        with pytest.raises(Exception) as exc_info:
            model.create_repo_svn_settings(SVN_FORM_DATA)
        assert str(exc_info.value) == 'Repository is not specified'
413 413
414 414
class TestCreateSvnSettings(object):
    """Tests for VcsSettingsModel._create_svn_settings."""

    def _read_patterns(self, model):
        """Return (branch_uis, tag_uis) currently stored for the repo."""
        branches = model.repo_settings.get_ui_by_section(
            model.SVN_BRANCH_SECTION)
        tags = model.repo_settings.get_ui_by_section(model.SVN_TAG_SECTION)
        return branches, tags

    def test_create(self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        model._create_svn_settings(model.repo_settings, SVN_FORM_DATA)
        Session().commit()

        branch_ui, tag_ui = self._read_patterns(model)
        try:
            assert len(branch_ui) == 1
            assert len(tag_ui) == 1
        finally:
            Session().delete(branch_ui[0])
            Session().delete(tag_ui[0])
            Session().commit()

    def test_create_tag(self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        data = SVN_FORM_DATA.copy()
        data.pop('new_svn_branch')
        model._create_svn_settings(model.repo_settings, data)
        Session().commit()

        branch_ui, tag_ui = self._read_patterns(model)
        try:
            assert len(branch_ui) == 0
            assert len(tag_ui) == 1
        finally:
            Session().delete(tag_ui[0])
            Session().commit()

    def test_create_nothing_when_no_svn_settings_specified(self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        model._create_svn_settings(model.repo_settings, {})
        Session().commit()

        branch_ui, tag_ui = self._read_patterns(model)
        assert len(branch_ui) == 0
        assert len(tag_ui) == 0

    def test_create_nothing_when_empty_settings_specified(self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        model._create_svn_settings(
            model.repo_settings, {'new_svn_branch': '', 'new_svn_tag': ''})
        Session().commit()

        branch_ui, tag_ui = self._read_patterns(model)
        assert len(branch_ui) == 0
        assert len(tag_ui) == 0
482 482
483 483
class TestCreateOrUpdateUi(object):
    """Tests for VcsSettingsModel._create_or_update_ui."""

    def test_create(self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        model._create_or_update_ui(
            model.repo_settings, 'test-section', 'test-key', active=False,
            value='False')
        Session().commit()

        created_ui = model.repo_settings.get_ui_by_section_and_key(
            'test-section', 'test-key')

        try:
            assert created_ui.ui_active is False
            assert str2bool(created_ui.ui_value) is False
        finally:
            # Always clean up the row we created.
            Session().delete(created_ui)
            Session().commit()

    def test_update(self, repo_stub, settings_util):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        # NOTE: a stray `largefiles, phases, evolve = model.HG_SETTINGS[:3]`
        # unpack was removed here - none of the names were used in this test.

        section = 'test-section'
        key = 'test-key'
        settings_util.create_repo_rhodecode_ui(
            repo_stub, section, 'True', key=key, active=True)

        model._create_or_update_ui(
            model.repo_settings, section, key, active=False, value='False')
        Session().commit()

        created_ui = model.repo_settings.get_ui_by_section_and_key(
            section, key)
        assert created_ui.ui_active is False
        assert str2bool(created_ui.ui_value) is False
520 520
521 521
class TestCreateOrUpdateRepoHgSettings(object):
    """Tests for VcsSettingsModel.create_or_update_repo_hg_settings."""

    FORM_DATA = {
        'extensions_largefiles': False,
        'extensions_evolve': False,
        'phases_publish': False
    }

    def test_creates_repo_hg_settings_when_data_is_correct(self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        with mock.patch.object(model, '_create_or_update_ui') as ui_mock:
            model.create_or_update_repo_hg_settings(self.FORM_DATA)

        # Exact call order matters: extensions, evolution knobs, then phases.
        expected_calls = [
            mock.call(model.repo_settings, 'extensions', 'largefiles', active=False, value=''),
            mock.call(model.repo_settings, 'extensions', 'evolve', active=False, value=''),
            mock.call(model.repo_settings, 'experimental', 'evolution', active=False, value=''),
            mock.call(model.repo_settings, 'experimental', 'evolution.exchange', active=False, value='no'),
            mock.call(model.repo_settings, 'extensions', 'topic', active=False, value=''),
            mock.call(model.repo_settings, 'phases', 'publish', value='False'),
        ]
        assert expected_calls == ui_mock.call_args_list

    @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
    def test_key_is_not_found(self, repo_stub, field_to_remove):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        data = self.FORM_DATA.copy()
        data.pop(field_to_remove)
        with pytest.raises(ValueError) as exc_info:
            model.create_or_update_repo_hg_settings(data)
        assert str(exc_info.value) == (
            'The given data does not contain {} key'.format(field_to_remove))

    def test_create_raises_exception_when_repository_not_specified(self):
        model = VcsSettingsModel()
        with pytest.raises(Exception) as exc_info:
            model.create_or_update_repo_hg_settings(self.FORM_DATA)
        assert str(exc_info.value) == 'Repository is not specified'
558 559
559 560
class TestUpdateGlobalSslSetting(object):

    def test_updates_global_hg_settings(self):
        """update_global_ssl_setting writes web/push_ssl into the global ui."""
        model = VcsSettingsModel()
        with mock.patch.object(model, '_create_or_update_ui') as ui_mock:
            model.update_global_ssl_setting('False')
        ui_mock.assert_called_once_with(
            model.global_settings, 'web', 'push_ssl', value='False')
567 568
568 569
class TestUpdateGlobalPathSetting(object):

    def test_updates_global_path_settings(self):
        """update_global_path_setting writes paths// into the global ui."""
        model = VcsSettingsModel()
        with mock.patch.object(model, '_create_or_update_ui') as ui_mock:
            model.update_global_path_setting('False')
        ui_mock.assert_called_once_with(
            model.global_settings, 'paths', '/', value='False')
576 577
577 578
class TestCreateOrUpdateGlobalHgSettings(object):
    """Tests for VcsSettingsModel.create_or_update_global_hg_settings."""

    FORM_DATA = {
        'extensions_largefiles': False,
        'largefiles_usercache': '/example/largefiles-store',
        'phases_publish': False,
        'extensions_hgsubversion': False,
        'extensions_evolve': False
    }

    def test_creates_repo_hg_settings_when_data_is_correct(self):
        model = VcsSettingsModel()
        with mock.patch.object(model, '_create_or_update_ui') as ui_mock:
            model.create_or_update_global_hg_settings(self.FORM_DATA)

        # Exact call order matters for this assertion.
        expected_calls = [
            mock.call(model.global_settings, 'extensions', 'largefiles', active=False, value=''),
            mock.call(model.global_settings, 'largefiles', 'usercache', value='/example/largefiles-store'),
            mock.call(model.global_settings, 'phases', 'publish', value='False'),
            mock.call(model.global_settings, 'extensions', 'hgsubversion', active=False),
            mock.call(model.global_settings, 'extensions', 'evolve', active=False, value=''),
            mock.call(model.global_settings, 'experimental', 'evolution', active=False, value=''),
            mock.call(model.global_settings, 'experimental', 'evolution.exchange', active=False, value='no'),
            mock.call(model.global_settings, 'extensions', 'topic', active=False, value=''),
        ]

        assert expected_calls == ui_mock.call_args_list

    @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
    def test_key_is_not_found(self, repo_stub, field_to_remove):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        data = self.FORM_DATA.copy()
        data.pop(field_to_remove)
        with pytest.raises(Exception) as exc_info:
            model.create_or_update_global_hg_settings(data)
        assert str(exc_info.value) == (
            'The given data does not contain {} key'.format(field_to_remove))
615 615
616 616
class TestCreateOrUpdateGlobalGitSettings(object):
    """Tests for VcsSettingsModel.create_or_update_global_git_settings."""

    FORM_DATA = {
        'vcs_git_lfs_enabled': False,
        'vcs_git_lfs_store_location': '/example/lfs-store',
    }

    def test_creates_repo_hg_settings_when_data_is_correct(self):
        model = VcsSettingsModel()
        with mock.patch.object(model, '_create_or_update_ui') as ui_mock:
            model.create_or_update_global_git_settings(self.FORM_DATA)

        expected_calls = [
            mock.call(model.global_settings, 'vcs_git_lfs', 'enabled', active=False, value=False),
            mock.call(model.global_settings, 'vcs_git_lfs', 'store_location', value='/example/lfs-store'),
        ]
        assert expected_calls == ui_mock.call_args_list
634 632
635 633
class TestDeleteRepoSvnPattern(object):
    """Tests for VcsSettingsModel.delete_repo_svn_pattern."""

    def test_success_when_repo_is_set(self, backend_svn, settings_util):
        repo = backend_svn.create_repo()
        model = VcsSettingsModel(repo=repo.repo_name)
        entry = settings_util.create_repo_rhodecode_ui(
            repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'svn-branch')
        Session().commit()

        model.delete_repo_svn_pattern(entry.ui_id)

    def test_fail_when_delete_id_from_other_repo(self, backend_svn):
        model = VcsSettingsModel(repo=backend_svn.repo_name)
        with mock.patch.object(
                model.repo_settings, 'delete_ui') as delete_mock:
            model.delete_repo_svn_pattern(123)
        # Ids not belonging to this repo are mapped to -1, so nothing
        # foreign is ever deleted.
        delete_mock.assert_called_once_with(-1)

    def test_raises_exception_when_repository_is_not_specified(self):
        model = VcsSettingsModel()
        with pytest.raises(Exception) as exc_info:
            model.delete_repo_svn_pattern(123)
        assert str(exc_info.value) == 'Repository is not specified'
661 659
662 660
class TestDeleteGlobalSvnPattern(object):

    def test_delete_global_svn_pattern_calls_delete_ui(self):
        """The given ui id is passed straight through to delete_ui."""
        model = VcsSettingsModel()
        with mock.patch.object(
                model.global_settings, 'delete_ui') as delete_mock:
            model.delete_global_svn_pattern(123)
        delete_mock.assert_called_once_with(123)
670 668
671 669
class TestFilterUiSettings(object):

    def test_settings_are_filtered(self):
        """Only repo-relevant ui settings survive _filter_ui_settings."""
        model = VcsSettingsModel()
        kept = [
            UiSetting('extensions', 'largefiles', '', True),
            UiSetting('phases', 'publish', 'True', True),
            UiSetting('hooks', 'changegroup.repo_size', 'hook', True),
            UiSetting('hooks', 'changegroup.push_logger', 'hook', True),
            UiSetting('hooks', 'outgoing.pull_logger', 'hook', True),
            UiSetting(
                'vcs_svn_branch', '84223c972204fa545ca1b22dac7bef5b68d7442d',
                'test_branch', True),
            UiSetting(
                'vcs_svn_tag', '84229c972204fa545ca1b22dac7bef5b68d7442d',
                'test_tag', True),
        ]
        dropped = [
            UiSetting('largefiles', 'usercache', '/example/largefiles-store', True),
            UiSetting('test', 'outgoing.pull_logger', 'hook', True),
            UiSetting('hooks', 'test2', 'hook', True),
            UiSetting(
                'vcs_svn_repo', '84229c972204fa545ca1b22dac7bef5b68d7442d',
                'test_tag', True),
        ]
        filtered_settings = model._filter_ui_settings(kept + dropped)
        assert sorted(filtered_settings) == sorted(kept)
699 697
700 698
class TestFilterGeneralSettings(object):

    def test_settings_are_filtered(self):
        """_filter_general_settings keeps only keys from GENERAL_SETTINGS."""
        model = VcsSettingsModel()
        settings = {
            'rhodecode_abcde': 'value1',
            'rhodecode_vwxyz': 'value2',
        }
        general_settings = {
            'rhodecode_{}'.format(key): 'value'
            for key in VcsSettingsModel.GENERAL_SETTINGS
        }
        settings.update(general_settings)

        # Filter the combined mapping; only the known general settings must
        # survive. (Previously `general_settings` itself was filtered, which
        # made the assertion trivially true and tested nothing.)
        filtered_settings = model._filter_general_settings(settings)
        assert sorted(filtered_settings) == sorted(general_settings)
716 714
717 715
class TestGetRepoUiSettings(object):
    """Tests for VcsSettingsModel.get_repo_ui_settings."""

    def test_global_uis_are_returned_when_no_repo_uis_found(
            self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        result = model.get_repo_ui_settings()
        svn_sections = (
            VcsSettingsModel.SVN_TAG_SECTION,
            VcsSettingsModel.SVN_BRANCH_SECTION)
        # svn pattern sections are excluded from the inherited globals
        expected_result = [
            s for s in model.global_settings.get_ui()
            if s.section not in svn_sections]
        assert sorted(result) == sorted(expected_result)

    def test_repo_uis_are_overriding_global_uis(
            self, repo_stub, settings_util):
        for section, key in VcsSettingsModel.HOOKS_SETTINGS:
            settings_util.create_repo_rhodecode_ui(
                repo_stub, section, 'repo', key=key, active=False)
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        result = model.get_repo_ui_settings()
        for setting in result:
            locator = (setting.section, setting.key)
            if locator in VcsSettingsModel.HOOKS_SETTINGS:
                assert setting.value == 'repo'
                # Only the overridden hook settings are guaranteed inactive;
                # previously this assertion sat outside the `if` and was
                # applied to every returned setting, overridden or not.
                assert setting.active is False

    def test_global_svn_patterns_are_not_in_list(
            self, repo_stub, settings_util):
        svn_sections = (
            VcsSettingsModel.SVN_TAG_SECTION,
            VcsSettingsModel.SVN_BRANCH_SECTION)
        for section in svn_sections:
            settings_util.create_rhodecode_ui(
                section, 'repo', key='deadbeef' + section, active=False)
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        result = model.get_repo_ui_settings()
        for setting in result:
            assert setting.section not in svn_sections

    def test_repo_uis_filtered_by_section_are_returned(
            self, repo_stub, settings_util):
        for section, key in VcsSettingsModel.HOOKS_SETTINGS:
            settings_util.create_repo_rhodecode_ui(
                repo_stub, section, 'repo', key=key, active=False)
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
        result = model.get_repo_ui_settings(section=section)
        for setting in result:
            assert setting.section == section

    def test_repo_uis_filtered_by_key_are_returned(
            self, repo_stub, settings_util):
        for section, key in VcsSettingsModel.HOOKS_SETTINGS:
            settings_util.create_repo_rhodecode_ui(
                repo_stub, section, 'repo', key=key, active=False)
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
        result = model.get_repo_ui_settings(key=key)
        for setting in result:
            assert setting.key == key

    def test_raises_exception_when_repository_is_not_specified(self):
        model = VcsSettingsModel()
        with pytest.raises(Exception) as exc_info:
            model.get_repo_ui_settings()
        assert str(exc_info.value) == 'Repository is not specified'
785 783
786 784
class TestGetRepoGeneralSettings(object):
    """Tests for VcsSettingsModel.get_repo_general_settings."""

    def test_global_settings_are_returned_when_no_repo_settings_found(
            self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        result = model.get_repo_general_settings()
        expected_result = model.global_settings.get_all_settings()
        assert sorted(result) == sorted(expected_result)

    def test_repo_uis_are_overriding_global_uis(
            self, repo_stub, settings_util):
        for key in VcsSettingsModel.GENERAL_SETTINGS:
            settings_util.create_repo_rhodecode_setting(
                repo_stub, key, 'abcde', type_='unicode')
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        # Previously this called get_repo_ui_settings() (copy-paste from the
        # ui test class) and looped over keys that could never match, so the
        # override was never actually asserted.
        result = model.get_repo_general_settings()
        for key in VcsSettingsModel.GENERAL_SETTINGS:
            # general settings are exposed with the 'rhodecode_' prefix
            assert result['rhodecode_' + key] == 'abcde'

    def test_raises_exception_when_repository_is_not_specified(self):
        model = VcsSettingsModel()
        with pytest.raises(Exception) as exc_info:
            model.get_repo_general_settings()
        assert str(exc_info.value) == 'Repository is not specified'
811 809
812 810
class TestGetGlobalGeneralSettings(object):
    """get_global_general_settings must ignore any repo-level overrides."""

    def test_global_settings_are_returned(self, repo_stub):
        model = VcsSettingsModel()
        assert sorted(model.get_global_general_settings()) == sorted(
            model.global_settings.get_all_settings())

    def test_repo_uis_are_not_overriding_global_uis(
            self, repo_stub, settings_util):
        for key in VcsSettingsModel.GENERAL_SETTINGS:
            settings_util.create_repo_rhodecode_setting(
                repo_stub, key, 'abcde', type_='unicode')
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        assert sorted(model.get_global_general_settings()) == sorted(
            model.global_settings.get_all_settings())
829 827
830 828
class TestGetGlobalUiSettings(object):
    """get_global_ui_settings must ignore repo overrides and honor filters."""

    def test_global_uis_are_returned(self, repo_stub):
        model = VcsSettingsModel()
        assert sorted(model.get_global_ui_settings()) == sorted(
            model.global_settings.get_ui())

    def test_repo_uis_are_not_overriding_global_uis(
            self, repo_stub, settings_util):
        for section, key in VcsSettingsModel.HOOKS_SETTINGS:
            settings_util.create_repo_rhodecode_ui(
                repo_stub, section, 'repo', key=key, active=False)
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        assert sorted(model.get_global_ui_settings()) == sorted(
            model.global_settings.get_ui())

    def test_ui_settings_filtered_by_section(
            self, repo_stub, settings_util):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        section, _key = VcsSettingsModel.HOOKS_SETTINGS[0]
        assert sorted(model.get_global_ui_settings(section=section)) == sorted(
            model.global_settings.get_ui(section=section))

    def test_ui_settings_filtered_by_key(
            self, repo_stub, settings_util):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        _section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
        assert sorted(model.get_global_ui_settings(key=key)) == sorted(
            model.global_settings.get_ui(key=key))
863 861
864 862
class TestGetGeneralSettings(object):
    """get_general_settings respects the inherit_global_settings flag."""

    def test_global_settings_are_returned_when_inherited_is_true(
            self, repo_stub, settings_util):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        model.inherit_global_settings = True
        for name in VcsSettingsModel.GENERAL_SETTINGS:
            settings_util.create_repo_rhodecode_setting(
                repo_stub, name, 'abcde', type_='unicode')
        assert sorted(model.get_general_settings()) == sorted(
            model.get_global_general_settings())

    def test_repo_settings_are_returned_when_inherited_is_false(
            self, repo_stub, settings_util):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        model.inherit_global_settings = False
        for name in VcsSettingsModel.GENERAL_SETTINGS:
            settings_util.create_repo_rhodecode_setting(
                repo_stub, name, 'abcde', type_='unicode')
        assert sorted(model.get_general_settings()) == sorted(
            model.get_repo_general_settings())

    def test_global_settings_are_returned_when_no_repository_specified(self):
        model = VcsSettingsModel()
        assert sorted(model.get_general_settings()) == sorted(
            model.get_global_general_settings())
893 891
894 892
class TestGetUiSettings(object):
    """get_ui_settings respects inheritance and passes filters through."""

    def test_global_settings_are_returned_when_inherited_is_true(
            self, repo_stub, settings_util):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        model.inherit_global_settings = True
        for section, key in VcsSettingsModel.HOOKS_SETTINGS:
            settings_util.create_repo_rhodecode_ui(
                repo_stub, section, 'repo', key=key, active=True)
        assert sorted(model.get_ui_settings()) == sorted(
            model.get_global_ui_settings())

    def test_repo_settings_are_returned_when_inherited_is_false(
            self, repo_stub, settings_util):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        model.inherit_global_settings = False
        for section, key in VcsSettingsModel.HOOKS_SETTINGS:
            settings_util.create_repo_rhodecode_ui(
                repo_stub, section, 'repo', key=key, active=True)
        assert sorted(model.get_ui_settings()) == sorted(
            model.get_repo_ui_settings())

    def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
        model = VcsSettingsModel(repo=repo_stub.repo_name)
        model.inherit_global_settings = False
        args = ('section', 'key')
        with mock.patch.object(model, 'get_repo_ui_settings') as repo_mock:
            model.get_ui_settings(*args)
        repo_mock.assert_called_once_with(*args)

    def test_global_settings_filtered_by_section_and_key(self):
        model = VcsSettingsModel()
        args = ('section', 'key')
        with mock.patch.object(
                model, 'get_global_ui_settings') as global_mock:
            model.get_ui_settings(*args)
        global_mock.assert_called_once_with(*args)

    def test_global_settings_are_returned_when_no_repository_specified(self):
        model = VcsSettingsModel()
        assert sorted(model.get_ui_settings()) == sorted(
            model.get_global_ui_settings())
939 937
940 938
941 939 class TestGetSvnPatterns(object):
942 940 def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
943 941 model = VcsSettingsModel(repo=repo_stub.repo_name)
944 942 args = ('section', )
945 943 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
946 944 model.get_svn_patterns(*args)
947 945 settings_mock.assert_called_once_with(*args)
948 946
949 947 def test_global_settings_filtered_by_section_and_key(self):
950 948 model = VcsSettingsModel()
951 949 args = ('section', )
952 950 with mock.patch.object(model, 'get_global_ui_settings') as (
953 951 settings_mock):
954 952 model.get_svn_patterns(*args)
955 953 settings_mock.assert_called_once_with(*args)
956 954
957 955
958 956 class TestGetReposLocation(object):
959 957 def test_returns_repos_location(self, repo_stub):
960 958 model = VcsSettingsModel()
961 959
962 960 result_mock = mock.Mock()
963 961 result_mock.ui_value = '/tmp'
964 962
965 963 with mock.patch.object(model, 'global_settings') as settings_mock:
966 964 settings_mock.get_ui_by_key.return_value = result_mock
967 965 result = model.get_repos_location()
968 966
969 967 settings_mock.get_ui_by_key.assert_called_once_with('/')
970 968 assert result == '/tmp'
971 969
972 970
973 971 class TestCreateOrUpdateRepoSettings(object):
974 972 FORM_DATA = {
975 973 'inherit_global_settings': False,
976 974 'hooks_changegroup_repo_size': False,
977 975 'hooks_changegroup_push_logger': False,
978 976 'hooks_outgoing_pull_logger': False,
979 977 'extensions_largefiles': False,
980 978 'extensions_evolve': False,
981 979 'largefiles_usercache': '/example/largefiles-store',
982 980 'vcs_git_lfs_enabled': False,
983 981 'vcs_git_lfs_store_location': '/',
984 982 'phases_publish': 'False',
985 983 'rhodecode_pr_merge_enabled': False,
986 984 'rhodecode_use_outdated_comments': False,
987 985 'new_svn_branch': '',
988 986 'new_svn_tag': ''
989 987 }
990 988
991 989 def test_get_raises_exception_when_repository_not_specified(self):
992 990 model = VcsSettingsModel()
993 991 with pytest.raises(Exception) as exc_info:
994 992 model.create_or_update_repo_settings(data=self.FORM_DATA)
995 993 assert str(exc_info.value) == 'Repository is not specified'
996 994
997 995 def test_only_svn_settings_are_updated_when_type_is_svn(self, backend_svn):
998 996 repo = backend_svn.create_repo()
999 997 model = VcsSettingsModel(repo=repo)
1000 998 with self._patch_model(model) as mocks:
1001 999 model.create_or_update_repo_settings(
1002 1000 data=self.FORM_DATA, inherit_global_settings=False)
1003 1001 mocks['create_repo_svn_settings'].assert_called_once_with(
1004 1002 self.FORM_DATA)
1005 1003 non_called_methods = (
1006 1004 'create_or_update_repo_hook_settings',
1007 1005 'create_or_update_repo_pr_settings',
1008 1006 'create_or_update_repo_hg_settings')
1009 1007 for method in non_called_methods:
1010 1008 assert mocks[method].call_count == 0
1011 1009
1012 1010 def test_non_svn_settings_are_updated_when_type_is_hg(self, backend_hg):
1013 1011 repo = backend_hg.create_repo()
1014 1012 model = VcsSettingsModel(repo=repo)
1015 1013 with self._patch_model(model) as mocks:
1016 1014 model.create_or_update_repo_settings(
1017 1015 data=self.FORM_DATA, inherit_global_settings=False)
1018 1016
1019 1017 assert mocks['create_repo_svn_settings'].call_count == 0
1020 1018 called_methods = (
1021 1019 'create_or_update_repo_hook_settings',
1022 1020 'create_or_update_repo_pr_settings',
1023 1021 'create_or_update_repo_hg_settings')
1024 1022 for method in called_methods:
1025 1023 mocks[method].assert_called_once_with(self.FORM_DATA)
1026 1024
1027 1025 def test_non_svn_and_hg_settings_are_updated_when_type_is_git(
1028 1026 self, backend_git):
1029 1027 repo = backend_git.create_repo()
1030 1028 model = VcsSettingsModel(repo=repo)
1031 1029 with self._patch_model(model) as mocks:
1032 1030 model.create_or_update_repo_settings(
1033 1031 data=self.FORM_DATA, inherit_global_settings=False)
1034 1032
1035 1033 assert mocks['create_repo_svn_settings'].call_count == 0
1036 1034 called_methods = (
1037 1035 'create_or_update_repo_hook_settings',
1038 1036 'create_or_update_repo_pr_settings')
1039 1037 non_called_methods = (
1040 1038 'create_repo_svn_settings',
1041 1039 'create_or_update_repo_hg_settings'
1042 1040 )
1043 1041 for method in called_methods:
1044 1042 mocks[method].assert_called_once_with(self.FORM_DATA)
1045 1043 for method in non_called_methods:
1046 1044 assert mocks[method].call_count == 0
1047 1045
1048 1046 def test_no_methods_are_called_when_settings_are_inherited(
1049 1047 self, backend):
1050 1048 repo = backend.create_repo()
1051 1049 model = VcsSettingsModel(repo=repo)
1052 1050 with self._patch_model(model) as mocks:
1053 1051 model.create_or_update_repo_settings(
1054 1052 data=self.FORM_DATA, inherit_global_settings=True)
1055 1053 for method_name in mocks:
1056 1054 assert mocks[method_name].call_count == 0
1057 1055
1058 1056 def test_cache_is_marked_for_invalidation(self, repo_stub):
1059 1057 model = VcsSettingsModel(repo=repo_stub)
1060 1058 invalidation_patcher = mock.patch(
1061 1059 'rhodecode.model.scm.ScmModel.mark_for_invalidation')
1062 1060 with invalidation_patcher as invalidation_mock:
1063 1061 model.create_or_update_repo_settings(
1064 1062 data=self.FORM_DATA, inherit_global_settings=True)
1065 1063 invalidation_mock.assert_called_once_with(
1066 1064 repo_stub.repo_name, delete=True)
1067 1065
1068 1066 def test_inherit_flag_is_saved(self, repo_stub):
1069 1067 model = VcsSettingsModel(repo=repo_stub)
1070 1068 model.inherit_global_settings = True
1071 1069 with self._patch_model(model):
1072 1070 model.create_or_update_repo_settings(
1073 1071 data=self.FORM_DATA, inherit_global_settings=False)
1074 1072 assert model.inherit_global_settings is False
1075 1073
1076 1074 def _patch_model(self, model):
1077 1075 return mock.patch.multiple(
1078 1076 model,
1079 1077 create_repo_svn_settings=mock.DEFAULT,
1080 1078 create_or_update_repo_hook_settings=mock.DEFAULT,
1081 1079 create_or_update_repo_pr_settings=mock.DEFAULT,
1082 1080 create_or_update_repo_hg_settings=mock.DEFAULT)
@@ -1,949 +1,949 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import textwrap
24 24
25 25 import rhodecode
26 26 from rhodecode.lib.utils2 import safe_unicode
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 MergeResponse, MergeFailureReason, Reference)
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.model.comment import CommentsModel
33 33 from rhodecode.model.db import PullRequest, Session
34 34 from rhodecode.model.pull_request import PullRequestModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37 37
38 38
39 39 pytestmark = [
40 40 pytest.mark.backends("git", "hg"),
41 41 ]
42 42
43 43
44 44 @pytest.mark.usefixtures('config_stub')
45 45 class TestPullRequestModel(object):
46 46
47 47 @pytest.fixture
48 48 def pull_request(self, request, backend, pr_util):
49 49 """
50 50 A pull request combined with multiples patches.
51 51 """
52 52 BackendClass = get_backend(backend.alias)
53 53 merge_resp = MergeResponse(
54 54 False, False, None, MergeFailureReason.UNKNOWN,
55 55 metadata={'exception': 'MockError'})
56 56 self.merge_patcher = mock.patch.object(
57 57 BackendClass, 'merge', return_value=merge_resp)
58 58 self.workspace_remove_patcher = mock.patch.object(
59 59 BackendClass, 'cleanup_merge_workspace')
60 60
61 61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
62 62 self.merge_mock = self.merge_patcher.start()
63 63 self.comment_patcher = mock.patch(
64 64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
65 65 self.comment_patcher.start()
66 66 self.notification_patcher = mock.patch(
67 67 'rhodecode.model.notification.NotificationModel.create')
68 68 self.notification_patcher.start()
69 69 self.helper_patcher = mock.patch(
70 70 'rhodecode.lib.helpers.route_path')
71 71 self.helper_patcher.start()
72 72
73 73 self.hook_patcher = mock.patch.object(PullRequestModel,
74 74 'trigger_pull_request_hook')
75 75 self.hook_mock = self.hook_patcher.start()
76 76
77 77 self.invalidation_patcher = mock.patch(
78 78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
79 79 self.invalidation_mock = self.invalidation_patcher.start()
80 80
81 81 self.pull_request = pr_util.create_pull_request(
82 82 mergeable=True, name_suffix=u'ąć')
83 83 self.source_commit = self.pull_request.source_ref_parts.commit_id
84 84 self.target_commit = self.pull_request.target_ref_parts.commit_id
85 85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
86 86 self.repo_id = self.pull_request.target_repo.repo_id
87 87
88 88 @request.addfinalizer
89 89 def cleanup_pull_request():
90 90 calls = [mock.call(
91 91 self.pull_request, self.pull_request.author, 'create')]
92 92 self.hook_mock.assert_has_calls(calls)
93 93
94 94 self.workspace_remove_patcher.stop()
95 95 self.merge_patcher.stop()
96 96 self.comment_patcher.stop()
97 97 self.notification_patcher.stop()
98 98 self.helper_patcher.stop()
99 99 self.hook_patcher.stop()
100 100 self.invalidation_patcher.stop()
101 101
102 102 return self.pull_request
103 103
104 104 def test_get_all(self, pull_request):
105 105 prs = PullRequestModel().get_all(pull_request.target_repo)
106 106 assert isinstance(prs, list)
107 107 assert len(prs) == 1
108 108
109 109 def test_count_all(self, pull_request):
110 110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
111 111 assert pr_count == 1
112 112
113 113 def test_get_awaiting_review(self, pull_request):
114 114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
115 115 assert isinstance(prs, list)
116 116 assert len(prs) == 1
117 117
118 118 def test_count_awaiting_review(self, pull_request):
119 119 pr_count = PullRequestModel().count_awaiting_review(
120 120 pull_request.target_repo)
121 121 assert pr_count == 1
122 122
123 123 def test_get_awaiting_my_review(self, pull_request):
124 124 PullRequestModel().update_reviewers(
125 125 pull_request, [(pull_request.author, ['author'], False, [])],
126 126 pull_request.author)
127 127 prs = PullRequestModel().get_awaiting_my_review(
128 128 pull_request.target_repo, user_id=pull_request.author.user_id)
129 129 assert isinstance(prs, list)
130 130 assert len(prs) == 1
131 131
132 132 def test_count_awaiting_my_review(self, pull_request):
133 133 PullRequestModel().update_reviewers(
134 134 pull_request, [(pull_request.author, ['author'], False, [])],
135 135 pull_request.author)
136 136 pr_count = PullRequestModel().count_awaiting_my_review(
137 137 pull_request.target_repo, user_id=pull_request.author.user_id)
138 138 assert pr_count == 1
139 139
140 140 def test_delete_calls_cleanup_merge(self, pull_request):
141 141 repo_id = pull_request.target_repo.repo_id
142 142 PullRequestModel().delete(pull_request, pull_request.author)
143 143
144 144 self.workspace_remove_mock.assert_called_once_with(
145 145 repo_id, self.workspace_id)
146 146
147 147 def test_close_calls_cleanup_and_hook(self, pull_request):
148 148 PullRequestModel().close_pull_request(
149 149 pull_request, pull_request.author)
150 150 repo_id = pull_request.target_repo.repo_id
151 151
152 152 self.workspace_remove_mock.assert_called_once_with(
153 153 repo_id, self.workspace_id)
154 154 self.hook_mock.assert_called_with(
155 155 self.pull_request, self.pull_request.author, 'close')
156 156
157 157 def test_merge_status(self, pull_request):
158 158 self.merge_mock.return_value = MergeResponse(
159 159 True, False, None, MergeFailureReason.NONE)
160 160
161 161 assert pull_request._last_merge_source_rev is None
162 162 assert pull_request._last_merge_target_rev is None
163 163 assert pull_request.last_merge_status is None
164 164
165 165 status, msg = PullRequestModel().merge_status(pull_request)
166 166 assert status is True
167 167 assert msg == 'This pull request can be automatically merged.'
168 168 self.merge_mock.assert_called_with(
169 169 self.repo_id, self.workspace_id,
170 170 pull_request.target_ref_parts,
171 171 pull_request.source_repo.scm_instance(),
172 172 pull_request.source_ref_parts, dry_run=True,
173 173 use_rebase=False, close_branch=False)
174 174
175 175 assert pull_request._last_merge_source_rev == self.source_commit
176 176 assert pull_request._last_merge_target_rev == self.target_commit
177 177 assert pull_request.last_merge_status is MergeFailureReason.NONE
178 178
179 179 self.merge_mock.reset_mock()
180 180 status, msg = PullRequestModel().merge_status(pull_request)
181 181 assert status is True
182 182 assert msg == 'This pull request can be automatically merged.'
183 183 assert self.merge_mock.called is False
184 184
185 185 def test_merge_status_known_failure(self, pull_request):
186 186 self.merge_mock.return_value = MergeResponse(
187 187 False, False, None, MergeFailureReason.MERGE_FAILED)
188 188
189 189 assert pull_request._last_merge_source_rev is None
190 190 assert pull_request._last_merge_target_rev is None
191 191 assert pull_request.last_merge_status is None
192 192
193 193 status, msg = PullRequestModel().merge_status(pull_request)
194 194 assert status is False
195 195 assert msg == 'This pull request cannot be merged because of merge conflicts.'
196 196 self.merge_mock.assert_called_with(
197 197 self.repo_id, self.workspace_id,
198 198 pull_request.target_ref_parts,
199 199 pull_request.source_repo.scm_instance(),
200 200 pull_request.source_ref_parts, dry_run=True,
201 201 use_rebase=False, close_branch=False)
202 202
203 203 assert pull_request._last_merge_source_rev == self.source_commit
204 204 assert pull_request._last_merge_target_rev == self.target_commit
205 205 assert (
206 206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
207 207
208 208 self.merge_mock.reset_mock()
209 209 status, msg = PullRequestModel().merge_status(pull_request)
210 210 assert status is False
211 211 assert msg == 'This pull request cannot be merged because of merge conflicts.'
212 212 assert self.merge_mock.called is False
213 213
214 214 def test_merge_status_unknown_failure(self, pull_request):
215 215 self.merge_mock.return_value = MergeResponse(
216 216 False, False, None, MergeFailureReason.UNKNOWN,
217 217 metadata={'exception': 'MockError'})
218 218
219 219 assert pull_request._last_merge_source_rev is None
220 220 assert pull_request._last_merge_target_rev is None
221 221 assert pull_request.last_merge_status is None
222 222
223 223 status, msg = PullRequestModel().merge_status(pull_request)
224 224 assert status is False
225 225 assert msg == (
226 226 'This pull request cannot be merged because of an unhandled exception. '
227 227 'MockError')
228 228 self.merge_mock.assert_called_with(
229 229 self.repo_id, self.workspace_id,
230 230 pull_request.target_ref_parts,
231 231 pull_request.source_repo.scm_instance(),
232 232 pull_request.source_ref_parts, dry_run=True,
233 233 use_rebase=False, close_branch=False)
234 234
235 235 assert pull_request._last_merge_source_rev is None
236 236 assert pull_request._last_merge_target_rev is None
237 237 assert pull_request.last_merge_status is None
238 238
239 239 self.merge_mock.reset_mock()
240 240 status, msg = PullRequestModel().merge_status(pull_request)
241 241 assert status is False
242 242 assert msg == (
243 243 'This pull request cannot be merged because of an unhandled exception. '
244 244 'MockError')
245 245 assert self.merge_mock.called is True
246 246
247 247 def test_merge_status_when_target_is_locked(self, pull_request):
248 248 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
249 249 status, msg = PullRequestModel().merge_status(pull_request)
250 250 assert status is False
251 251 assert msg == (
252 252 'This pull request cannot be merged because the target repository '
253 253 'is locked by user:1.')
254 254
255 255 def test_merge_status_requirements_check_target(self, pull_request):
256 256
257 257 def has_largefiles(self, repo):
258 258 return repo == pull_request.source_repo
259 259
260 260 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
261 261 with patcher:
262 262 status, msg = PullRequestModel().merge_status(pull_request)
263 263
264 264 assert status is False
265 265 assert msg == 'Target repository large files support is disabled.'
266 266
267 267 def test_merge_status_requirements_check_source(self, pull_request):
268 268
269 269 def has_largefiles(self, repo):
270 270 return repo == pull_request.target_repo
271 271
272 272 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
273 273 with patcher:
274 274 status, msg = PullRequestModel().merge_status(pull_request)
275 275
276 276 assert status is False
277 277 assert msg == 'Source repository large files support is disabled.'
278 278
279 279 def test_merge(self, pull_request, merge_extras):
280 280 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
281 281 merge_ref = Reference(
282 282 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
283 283 self.merge_mock.return_value = MergeResponse(
284 284 True, True, merge_ref, MergeFailureReason.NONE)
285 285
286 286 merge_extras['repository'] = pull_request.target_repo.repo_name
287 287 PullRequestModel().merge_repo(
288 288 pull_request, pull_request.author, extras=merge_extras)
289 289
290 290 message = (
291 291 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
292 292 u'\n\n {pr_title}'.format(
293 293 pr_id=pull_request.pull_request_id,
294 294 source_repo=safe_unicode(
295 295 pull_request.source_repo.scm_instance().name),
296 296 source_ref_name=pull_request.source_ref_parts.name,
297 297 pr_title=safe_unicode(pull_request.title)
298 298 )
299 299 )
300 300 self.merge_mock.assert_called_with(
301 301 self.repo_id, self.workspace_id,
302 302 pull_request.target_ref_parts,
303 303 pull_request.source_repo.scm_instance(),
304 304 pull_request.source_ref_parts,
305 305 user_name=user.short_contact, user_email=user.email, message=message,
306 306 use_rebase=False, close_branch=False
307 307 )
308 308 self.invalidation_mock.assert_called_once_with(
309 309 pull_request.target_repo.repo_name)
310 310
311 311 self.hook_mock.assert_called_with(
312 312 self.pull_request, self.pull_request.author, 'merge')
313 313
314 314 pull_request = PullRequest.get(pull_request.pull_request_id)
315 315 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
316 316
317 317 def test_merge_with_status_lock(self, pull_request, merge_extras):
318 318 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
319 319 merge_ref = Reference(
320 320 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
321 321 self.merge_mock.return_value = MergeResponse(
322 322 True, True, merge_ref, MergeFailureReason.NONE)
323 323
324 324 merge_extras['repository'] = pull_request.target_repo.repo_name
325 325
326 326 with pull_request.set_state(PullRequest.STATE_UPDATING):
327 327 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
328 328 PullRequestModel().merge_repo(
329 329 pull_request, pull_request.author, extras=merge_extras)
330 330
331 331 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
332 332
333 333 message = (
334 334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
335 335 u'\n\n {pr_title}'.format(
336 336 pr_id=pull_request.pull_request_id,
337 337 source_repo=safe_unicode(
338 338 pull_request.source_repo.scm_instance().name),
339 339 source_ref_name=pull_request.source_ref_parts.name,
340 340 pr_title=safe_unicode(pull_request.title)
341 341 )
342 342 )
343 343 self.merge_mock.assert_called_with(
344 344 self.repo_id, self.workspace_id,
345 345 pull_request.target_ref_parts,
346 346 pull_request.source_repo.scm_instance(),
347 347 pull_request.source_ref_parts,
348 348 user_name=user.short_contact, user_email=user.email, message=message,
349 349 use_rebase=False, close_branch=False
350 350 )
351 351 self.invalidation_mock.assert_called_once_with(
352 352 pull_request.target_repo.repo_name)
353 353
354 354 self.hook_mock.assert_called_with(
355 355 self.pull_request, self.pull_request.author, 'merge')
356 356
357 357 pull_request = PullRequest.get(pull_request.pull_request_id)
358 358 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
359 359
360 360 def test_merge_failed(self, pull_request, merge_extras):
361 361 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
362 362 merge_ref = Reference(
363 363 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
364 364 self.merge_mock.return_value = MergeResponse(
365 365 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
366 366
367 367 merge_extras['repository'] = pull_request.target_repo.repo_name
368 368 PullRequestModel().merge_repo(
369 369 pull_request, pull_request.author, extras=merge_extras)
370 370
371 371 message = (
372 372 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
373 373 u'\n\n {pr_title}'.format(
374 374 pr_id=pull_request.pull_request_id,
375 375 source_repo=safe_unicode(
376 376 pull_request.source_repo.scm_instance().name),
377 377 source_ref_name=pull_request.source_ref_parts.name,
378 378 pr_title=safe_unicode(pull_request.title)
379 379 )
380 380 )
381 381 self.merge_mock.assert_called_with(
382 382 self.repo_id, self.workspace_id,
383 383 pull_request.target_ref_parts,
384 384 pull_request.source_repo.scm_instance(),
385 385 pull_request.source_ref_parts,
386 386 user_name=user.short_contact, user_email=user.email, message=message,
387 387 use_rebase=False, close_branch=False
388 388 )
389 389
390 390 pull_request = PullRequest.get(pull_request.pull_request_id)
391 391 assert self.invalidation_mock.called is False
392 392 assert pull_request.merge_rev is None
393 393
394 394 def test_get_commit_ids(self, pull_request):
395 395 # The PR has been not merget yet, so expect an exception
396 396 with pytest.raises(ValueError):
397 397 PullRequestModel()._get_commit_ids(pull_request)
398 398
399 399 # Merge revision is in the revisions list
400 400 pull_request.merge_rev = pull_request.revisions[0]
401 401 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
402 402 assert commit_ids == pull_request.revisions
403 403
404 404 # Merge revision is not in the revisions list
405 405 pull_request.merge_rev = 'f000' * 10
406 406 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
407 407 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
408 408
409 409 def test_get_diff_from_pr_version(self, pull_request):
410 410 source_repo = pull_request.source_repo
411 411 source_ref_id = pull_request.source_ref_parts.commit_id
412 412 target_ref_id = pull_request.target_ref_parts.commit_id
413 413 diff = PullRequestModel()._get_diff_from_pr_or_version(
414 414 source_repo, source_ref_id, target_ref_id,
415 415 hide_whitespace_changes=False, diff_context=6)
416 416 assert 'file_1' in diff.raw
417 417
418 418 def test_generate_title_returns_unicode(self):
419 419 title = PullRequestModel().generate_pullrequest_title(
420 420 source='source-dummy',
421 421 source_ref='source-ref-dummy',
422 422 target='target-dummy',
423 423 )
424 424 assert type(title) == unicode
425 425
426 426
427 427 @pytest.mark.usefixtures('config_stub')
428 428 class TestIntegrationMerge(object):
429 429 @pytest.mark.parametrize('extra_config', (
430 430 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
431 431 ))
432 432 def test_merge_triggers_push_hooks(
433 433 self, pr_util, user_admin, capture_rcextensions, merge_extras,
434 434 extra_config):
435 435
436 436 pull_request = pr_util.create_pull_request(
437 437 approved=True, mergeable=True)
438 438 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
439 439 merge_extras['repository'] = pull_request.target_repo.repo_name
440 440 Session().commit()
441 441
442 442 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
443 443 merge_state = PullRequestModel().merge_repo(
444 444 pull_request, user_admin, extras=merge_extras)
445 445
446 446 assert merge_state.executed
447 447 assert '_pre_push_hook' in capture_rcextensions
448 448 assert '_push_hook' in capture_rcextensions
449 449
450 450 def test_merge_can_be_rejected_by_pre_push_hook(
451 451 self, pr_util, user_admin, capture_rcextensions, merge_extras):
452 452 pull_request = pr_util.create_pull_request(
453 453 approved=True, mergeable=True)
454 454 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
455 455 merge_extras['repository'] = pull_request.target_repo.repo_name
456 456 Session().commit()
457 457
458 458 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
459 459 pre_pull.side_effect = RepositoryError("Disallow push!")
460 460 merge_status = PullRequestModel().merge_repo(
461 461 pull_request, user_admin, extras=merge_extras)
462 462
463 463 assert not merge_status.executed
464 464 assert 'pre_push' not in capture_rcextensions
465 465 assert 'post_push' not in capture_rcextensions
466 466
467 467 def test_merge_fails_if_target_is_locked(
468 468 self, pr_util, user_regular, merge_extras):
469 469 pull_request = pr_util.create_pull_request(
470 470 approved=True, mergeable=True)
471 471 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
472 472 pull_request.target_repo.locked = locked_by
473 473 # TODO: johbo: Check if this can work based on the database, currently
474 474 # all data is pre-computed, that's why just updating the DB is not
475 475 # enough.
476 476 merge_extras['locked_by'] = locked_by
477 477 merge_extras['repository'] = pull_request.target_repo.repo_name
478 478 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
479 479 Session().commit()
480 480 merge_status = PullRequestModel().merge_repo(
481 481 pull_request, user_regular, extras=merge_extras)
482 482 assert not merge_status.executed
483 483
484 484
485 485 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
486 486 (False, 1, 0),
487 487 (True, 0, 1),
488 488 ])
489 489 def test_outdated_comments(
490 490 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
491 491 pull_request = pr_util.create_pull_request()
492 492 pr_util.create_inline_comment(file_path='not_in_updated_diff')
493 493
494 494 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
495 495 pr_util.add_one_commit()
496 496 assert_inline_comments(
497 497 pull_request, visible=inlines_count, outdated=outdated_count)
498 498 outdated_comment_mock.assert_called_with(pull_request)
499 499
500 500
501 501 @pytest.mark.parametrize('mr_type, expected_msg', [
502 502 (MergeFailureReason.NONE,
503 503 'This pull request can be automatically merged.'),
504 504 (MergeFailureReason.UNKNOWN,
505 505 'This pull request cannot be merged because of an unhandled exception. CRASH'),
506 506 (MergeFailureReason.MERGE_FAILED,
507 507 'This pull request cannot be merged because of merge conflicts.'),
508 508 (MergeFailureReason.PUSH_FAILED,
509 509 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
510 510 (MergeFailureReason.TARGET_IS_NOT_HEAD,
511 511 'This pull request cannot be merged because the target `ref_name` is not a head.'),
512 512 (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
513 513 'This pull request cannot be merged because the source contains more branches than the target.'),
514 514 (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
515 'This pull request cannot be merged because the target has multiple heads: `a,b,c`.'),
515 'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
516 516 (MergeFailureReason.TARGET_IS_LOCKED,
517 517 'This pull request cannot be merged because the target repository is locked by user:123.'),
518 518 (MergeFailureReason.MISSING_TARGET_REF,
519 519 'This pull request cannot be merged because the target reference `ref_name` is missing.'),
520 520 (MergeFailureReason.MISSING_SOURCE_REF,
521 521 'This pull request cannot be merged because the source reference `ref_name` is missing.'),
522 522 (MergeFailureReason.SUBREPO_MERGE_FAILED,
523 523 'This pull request cannot be merged because of conflicts related to sub repositories.'),
524 524
525 525 ])
526 526 def test_merge_response_message(mr_type, expected_msg):
527 527 merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
528 528 metadata = {
529 529 'exception': "CRASH",
530 530 'target': 'some-repo',
531 531 'merge_commit': 'merge_commit',
532 532 'target_ref': merge_ref,
533 533 'source_ref': merge_ref,
534 534 'heads': ','.join(['a', 'b', 'c']),
535 535 'locked_by': 'user:123'}
536 536
537 537 merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
538 538 assert merge_response.merge_status_message == expected_msg
539 539
540 540
541 541 @pytest.fixture
542 542 def merge_extras(user_regular):
543 543 """
544 544 Context for the vcs operation when running a merge.
545 545 """
546 546 extras = {
547 547 'ip': '127.0.0.1',
548 548 'username': user_regular.username,
549 549 'user_id': user_regular.user_id,
550 550 'action': 'push',
551 551 'repository': 'fake_target_repo_name',
552 552 'scm': 'git',
553 553 'config': 'fake_config_ini_path',
554 554 'repo_store': '',
555 555 'make_lock': None,
556 556 'locked_by': [None, None, None],
557 557 'server_url': 'http://test.example.com:5000',
558 558 'hooks': ['push', 'pull'],
559 559 'is_shadow_repo': False,
560 560 }
561 561 return extras
562 562
563 563
@pytest.mark.usefixtures('config_stub')
class TestUpdateCommentHandling(object):
    """
    Verify how inline comments behave when a pull request is updated with
    new commits: comments on unchanged lines stay visible (and may be
    re-positioned), comments on changed lines are flagged as outdated.
    """

    @pytest.fixture(autouse=True, scope='class')
    def enable_outdated_comments(self, request, baseapp):
        """Force the outdated-comments feature on for every test in this class."""
        config_patch = mock.patch.dict(
            'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
        config_patch.start()

        @request.addfinalizer
        def cleanup():
            # Undo the CONFIG patch once the whole class has run.
            config_patch.stop()

    def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
        """A new commit not touching the commented file keeps the comment visible."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    def test_comment_stays_unflagged_on_change_above(self, pr_util):
        """A line inserted above the comment shifts its line number, not its status."""
        original_content = ''.join(
            ['line {}\n'.format(x) for x in range(1, 11)])
        updated_content = 'new_line_at_top\n' + original_content
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])

        with outdated_comments_patcher():
            comment = pr_util.create_inline_comment(
                line_no=u'n8', file_path='file_b')
            pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)
        # The inserted top line pushes the comment from line 8 to line 9.
        assert comment.line_no == u'n9'

    def test_comment_stays_unflagged_on_change_below(self, pr_util):
        """A line appended below the comment leaves it visible and in place."""
        original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
        updated_content = original_content + 'new_line_at_end\n'
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
    def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
        """A change within the comment's diff context flags it as outdated."""
        base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
        change_lines = list(base_lines)
        change_lines.insert(6, 'line 6a added\n')

        # Changes on the last line of sight
        update_lines = list(change_lines)
        update_lines[0] = 'line 1 changed\n'
        update_lines[-1] = 'line 12 changed\n'

        def file_b(lines):
            # Helper: build the test file node from a list of lines.
            return FileNode('file_b', ''.join(lines))

        commits = [
            {'message': 'a', 'added': [file_b(base_lines)]},
            {'message': 'b', 'changed': [file_b(change_lines)]},
            {'message': 'c', 'changed': [file_b(update_lines)]},
        ]

        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(line_no=line_no, file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
            assert_inline_comments(pull_request, visible=0, outdated=1)

    @pytest.mark.parametrize("change, content", [
        ('changed', 'changed\n'),
        ('removed', ''),
    ], ids=['changed', 'removed'])
    def test_comment_flagged_on_change(self, pr_util, change, content):
        """Changing or removing the commented file marks the comment outdated."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', change: [FileNode('file_b', content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
            assert_inline_comments(pull_request, visible=0, outdated=1)
670 670
671 671
@pytest.mark.usefixtures('config_stub')
class TestUpdateChangedFiles(object):
    """
    Verify the per-file change report (added/modified/removed) computed
    between the original pull request version and its updated state.
    """

    def test_no_changes_on_unchanged_diff(self, pr_util):
        """An update adding an unrelated file reports only that file as added."""
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'added': [FileNode('file_c', 'test_content c\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR adding new file file_c
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=['file_c'],
            modified=[],
            removed=[])

    def test_modify_and_undo_modification_diff(self, pr_util):
        """A modification followed by its revert ends with no reported changes."""
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'changed': [FileNode('file_b', 'test_content b modified\n')]},
            {'message': 'd',
             'changed': [FileNode('file_b', 'test_content b\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR modifying file file_b
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_b'],
            removed=[])

        # move the head again to d, which rollbacks change,
        # meaning we should indicate no changes
        pr_util.add_one_commit(head='d')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=[])

    def test_updated_all_files_in_pr(self, pr_util):
        """An update touching every PR file reports all of them as modified."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'changed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # open a PR from a to b, changing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_a', 'file_b', 'file_c'],
            removed=[])

    def test_updated_and_removed_all_files_in_pr(self, pr_util):
        """An update deleting every PR file reports all of them as removed."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'removed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # open a PR from a to b, removing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=['file_a', 'file_b', 'file_c'])
780 780
781 781
def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
    """Updating commits after the source moved snapshots the old PR state."""
    pr = pr_util.create_pull_request()
    pr_util.update_source_repository()
    pr_model = PullRequestModel()

    pr_model.update_commits(pr)

    # Exactly one version entry must exist after the update.
    versions = pr_model.get_versions(pr)
    assert len(versions) == 1
791 791
792 792
def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
    """No source change means update_commits creates no version entry."""
    pr = pr_util.create_pull_request()
    pr_model = PullRequestModel()

    pr_model.update_commits(pr)

    # The version list must still be empty.
    assert not pr_model.get_versions(pr)
800 800
801 801
def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
    """An existing comment is attached to the version created on update."""
    pr = pr_util.create_pull_request()
    comment = pr_util.create_comment()
    pr_util.update_source_repository()
    pr_model = PullRequestModel()

    pr_model.update_commits(pr)

    # The comment must now point at the freshly created version.
    first_version = pr_model.get_versions(pr)[0]
    assert comment.pull_request_version == first_version
812 812
813 813
def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
    """Updating the PR posts a status comment describing the commit changes."""
    pr = pr_util.create_pull_request()
    pr_util.update_source_repository()
    pr_util.update_source_repository()
    pr_model = PullRequestModel()

    pr_model.update_commits(pr)

    # The newest comment must carry the auto-generated change summary.
    expected_text = textwrap.dedent(
        """\
        Pull request updated. Auto status change to |under_review|

        .. role:: added
        .. role:: removed
        .. parsed-literal::

          Changed commits:
            * :added:`1 added`
            * :removed:`0 removed`

          Changed files:
            * `A file_2 <#a_c--92ed3b5f07b4>`_

        .. |under_review| replace:: *"Under Review"*"""
    )
    comments_by_age = sorted(pr.comments, key=lambda c: c.modified_at)
    newest_comment = comments_by_age[-1]
    assert newest_comment.text == expected_text
844 844
845 845
def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
    """A snapshot version copies the PR attributes and refreshes created_on."""
    pr = pr_util.create_pull_request()

    # Set every field to a non-default value so copying is observable.
    pr.status = PullRequest.STATUS_CLOSED
    pr._last_merge_source_rev = "0" * 40
    pr._last_merge_target_rev = "1" * 40
    pr.last_merge_status = 1
    pr.merge_rev = "2" * 40

    # Capture the automatically managed timestamps before snapshotting.
    created_on = pr.created_on
    updated_on = pr.updated_on

    # Snapshot the pull request into a new version.
    version = PullRequestModel()._create_version_from_snapshot(pr)

    # Creation parameters must carry over verbatim.
    assert version.title == pr_util.create_parameters['title']
    assert version.description == pr_util.create_parameters['description']
    assert version.status == PullRequest.STATUS_CLOSED

    # versions get updated created_on
    assert version.created_on != created_on

    assert version.updated_on == updated_on
    assert version.user_id == pr.user_id
    assert version.revisions == pr_util.create_parameters['revisions']
    assert version.source_repo == pr_util.source_repository
    assert version.source_ref == pr_util.create_parameters['source_ref']
    assert version.target_repo == pr_util.target_repository
    assert version.target_ref == pr_util.create_parameters['target_ref']
    assert version._last_merge_source_rev == pr._last_merge_source_rev
    assert version._last_merge_target_rev == pr._last_merge_target_rev
    assert version.last_merge_status == pr.last_merge_status
    assert version.merge_rev == pr.merge_rev
    assert version.pull_request == pr
883 883
884 884
def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
    """Linking comments to a version must leave already-linked ones alone."""
    first_version = pr_util.create_version_of_pull_request()
    linked_comment = pr_util.create_comment(linked_to=first_version)
    unlinked_comment = pr_util.create_comment()
    second_version = pr_util.create_version_of_pull_request()

    PullRequestModel()._link_comments_to_version(second_version)

    # Only the previously unlinked comment moves to the new version.
    assert unlinked_comment.pull_request_version_id == second_version.pull_request_version_id
    assert linked_comment.pull_request_version_id == first_version.pull_request_version_id
    assert unlinked_comment.pull_request_version_id != linked_comment.pull_request_version_id
903 903
904 904
def test_calculate_commits():
    """_calculate_commit_id_changes splits ids into added/common/removed/total."""
    previous_ids = [1, 2, 3]
    current_ids = [1, 3, 4, 5]

    change = PullRequestModel()._calculate_commit_id_changes(
        previous_ids, current_ids)

    assert (change.added, change.common, change.removed, change.total) == (
        [4, 5], [1, 3], [2], [1, 3, 4, 5])
913 913
914 914
def assert_inline_comments(pull_request, visible=None, outdated=None):
    """
    Assert the number of visible and/or outdated inline comments on a PR.

    Pass ``None`` for either count to skip that particular check.
    """
    comments_model = CommentsModel()
    repo_id = pull_request.target_repo.repo_id
    if visible is not None:
        inline = comments_model.get_inline_comments(
            repo_id, pull_request=pull_request)
        assert comments_model.get_inline_comments_count(inline) == visible
    if outdated is not None:
        outdated_found = comments_model.get_outdated_comments(
            repo_id, pull_request)
        assert len(outdated_found) == outdated
926 926
927 927
def assert_pr_file_changes(
        pull_request, added=None, modified=None, removed=None):
    """
    Assert the file changes between the original PR version and its
    current state.

    ``added``/``modified``/``removed`` are the expected lists of file
    paths; pass ``None`` to skip that particular check (consistent with
    :func:`assert_inline_comments`).  Previously the defaults of ``None``
    were compared unconditionally, producing a confusing
    ``None == [...]`` failure for callers relying on them.
    """
    pr_versions = PullRequestModel().get_versions(pull_request)
    # always use first version, ie original PR to calculate changes
    pull_request_version = pr_versions[0]
    old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
        pull_request, pull_request_version)
    file_changes = PullRequestModel()._calculate_file_changes(
        old_diff_data, new_diff_data)

    if added is not None:
        assert added == file_changes.added, \
            'expected added:%s vs value:%s' % (added, file_changes.added)
    if modified is not None:
        assert modified == file_changes.modified, \
            'expected modified:%s vs value:%s' % (modified, file_changes.modified)
    if removed is not None:
        assert removed == file_changes.removed, \
            'expected removed:%s vs value:%s' % (removed, file_changes.removed)
944 944
945 945
def outdated_comments_patcher(use_outdated=True):
    # Patch CommentsModel.use_outdated_comments so tests can force the
    # outdated-comments behavior on (or off) regardless of configuration.
    # Returns the (unstarted) patcher; use it as a context manager.
    return mock.patch.object(
        CommentsModel, 'use_outdated_comments',
        return_value=use_outdated)
General Comments 0
You need to be logged in to leave comments. Login now