##// END OF EJS Templates
merge: merged default changes into new-ui
marcink -
r3638:c1c37b0b merge new-ui
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,41 b''
1 |RCE| 4.16.2 |RNS|
2 ------------------
3
4 Release Date
5 ^^^^^^^^^^^^
6
7 - 2019-04-02
8
9
10 New Features
11 ^^^^^^^^^^^^
12
13
14
15 General
16 ^^^^^^^
17
18
19
20 Security
21 ^^^^^^^^
22
23
24
25 Performance
26 ^^^^^^^^^^^
27
28
29
30 Fixes
31 ^^^^^
32
33 - Integrations: fixed missing template variable for fork reference checks.
34 - Permissions: fixed server error when showing permissions for user groups.
35 - Pull requests: fixed a bug in removal of multiple reviewers at once.
36
37
38 Upgrade notes
39 ^^^^^^^^^^^^^
40
41 - Scheduled release addressing problems in 4.16.X releases.
@@ -0,0 +1,54 b''
1 # -*- coding: utf-8 -*-
2
3 import logging
4
5 from alembic.migration import MigrationContext
6 from alembic.operations import Operations
7 from sqlalchemy import String, Column
8 from sqlalchemy.sql import text
9
10 from rhodecode.lib.dbmigrate.versions import _reset_base
11 from rhodecode.model import meta, init_model_encryption
12 from rhodecode.model.db import RepoGroup
13
14
15 log = logging.getLogger(__name__)
16
17
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata
    """
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_16_0_2

    init_model_encryption(db_4_16_0_2)

    migration_ctx = MigrationContext.configure(migrate_engine.connect())
    operations = Operations(migration_ctx)

    repo_group_table = db_4_16_0_2.RepoGroup.__table__

    # Add the hash column as nullable first; rows are back-filled below and a
    # follow-up migration tightens the column to NOT NULL.
    with operations.batch_alter_table(repo_group_table.name) as batch:
        batch.add_column(
            Column('repo_group_name_hash', String(1024),
                   nullable=True, unique=False))

    _generate_repo_group_name_hashes(db_4_16_0_2, operations, meta.Session)
38
39
def downgrade(migrate_engine):
    """Intentionally a no-op: the added column is kept on downgrade."""
    return None
42
43
def _generate_repo_group_name_hashes(models, op, session):
    """
    Back-fill ``repo_group_name_hash`` for every existing repo group.

    :param models: versioned schema module providing the ``RepoGroup`` model.
    :param op: alembic ``Operations`` bound to the migration context.
    :param session: session factory; called once to commit the back-fill.
    """
    repo_groups = models.RepoGroup.get_all()
    for repo_group in repo_groups:
        # Use the module logger instead of a stray print() so migration
        # progress goes through the configured logging, not raw stdout.
        log.debug('generating name hash for repo group %s',
                  repo_group.group_name)
        hash_ = RepoGroup.hash_repo_group_name(repo_group.group_name)
        params = {'hash': hash_, 'id': repo_group.group_id}
        query = text(
            'UPDATE groups SET repo_group_name_hash = :hash'
            ' WHERE group_id = :id').bindparams(**params)
        op.execute(query)
    session().commit()
@@ -0,0 +1,39 b''
1 # -*- coding: utf-8 -*-
2
3 import logging
4
5 from alembic.migration import MigrationContext
6 from alembic.operations import Operations
7
8 from rhodecode.lib.dbmigrate.versions import _reset_base
9 from rhodecode.model import init_model_encryption
10
11
12 log = logging.getLogger(__name__)
13
14
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata
    """
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_16_0_2

    init_model_encryption(db_4_16_0_2)

    migration_ctx = MigrationContext.configure(migrate_engine.connect())
    operations = Operations(migration_ctx)

    repo_group_table = db_4_16_0_2.RepoGroup.__table__

    # The column was back-filled by the previous migration, so it is now
    # safe to enforce NOT NULL.
    with operations.batch_alter_table(repo_group_table.name) as batch:
        batch.alter_column('repo_group_name_hash', nullable=False)
33
def downgrade(migrate_engine):
    """Intentionally a no-op: the NOT NULL constraint is left in place."""
    return None
36
37
38 def _generate_repo_group_name_hashes(models, op, session):
39 pass
@@ -1,53 +1,54 b''
1 1 1bd3e92b7e2e2d2024152b34bb88dff1db544a71 v4.0.0
2 2 170c5398320ea6cddd50955e88d408794c21d43a v4.0.1
3 3 c3fe200198f5aa34cf2e4066df2881a9cefe3704 v4.1.0
4 4 7fd5c850745e2ea821fb4406af5f4bff9b0a7526 v4.1.1
5 5 41c87da28a179953df86061d817bc35533c66dd2 v4.1.2
6 6 baaf9f5bcea3bae0ef12ae20c8b270482e62abb6 v4.2.0
7 7 32a70c7e56844a825f61df496ee5eaf8c3c4e189 v4.2.1
8 8 fa695cdb411d294679ac081d595ac654e5613b03 v4.3.0
9 9 0e4dc11b58cad833c513fe17bac39e6850edf959 v4.3.1
10 10 8a876f48f5cb1d018b837db28ff928500cb32cfb v4.4.0
11 11 8dd86b410b1aac086ffdfc524ef300f896af5047 v4.4.1
12 12 d2514226abc8d3b4f6fb57765f47d1b6fb360a05 v4.4.2
13 13 27d783325930af6dad2741476c0d0b1b7c8415c2 v4.5.0
14 14 7f2016f352abcbdba4a19d4039c386e9629449da v4.5.1
15 15 416fec799314c70a5c780fb28b3357b08869333a v4.5.2
16 16 27c3b85fafc83143e6678fbc3da69e1615bcac55 v4.6.0
17 17 5ad13deb9118c2a5243d4032d4d9cc174e5872db v4.6.1
18 18 2be921e01fa24bb102696ada596f87464c3666f6 v4.7.0
19 19 7198bdec29c2872c974431d55200d0398354cdb1 v4.7.1
20 20 bd1c8d230fe741c2dfd7100a0ef39fd0774fd581 v4.7.2
21 21 9731914f89765d9628dc4dddc84bc9402aa124c8 v4.8.0
22 22 c5a2b7d0e4bbdebc4a62d7b624befe375207b659 v4.9.0
23 23 d9aa3b27ac9f7e78359775c75fedf7bfece232f1 v4.9.1
24 24 4ba4d74981cec5d6b28b158f875a2540952c2f74 v4.10.0
25 25 0a6821cbd6b0b3c21503002f88800679fa35ab63 v4.10.1
26 26 434ad90ec8d621f4416074b84f6e9ce03964defb v4.10.2
27 27 68baee10e698da2724c6e0f698c03a6abb993bf2 v4.10.3
28 28 00821d3afd1dce3f4767cc353f84a17f7d5218a1 v4.10.4
29 29 22f6744ad8cc274311825f63f953e4dee2ea5cb9 v4.10.5
30 30 96eb24bea2f5f9258775245e3f09f6fa0a4dda01 v4.10.6
31 31 3121217a812c956d7dd5a5875821bd73e8002a32 v4.11.0
32 32 fa98b454715ac5b912f39e84af54345909a2a805 v4.11.1
33 33 3982abcfdcc229a723cebe52d3a9bcff10bba08e v4.11.2
34 34 33195f145db9172f0a8f1487e09207178a6ab065 v4.11.3
35 35 194c74f33e32bbae6fc4d71ec5a999cff3c13605 v4.11.4
36 36 8fbd8b0c3ddc2fa4ac9e4ca16942a03eb593df2d v4.11.5
37 37 f0609aa5d5d05a1ca2f97c3995542236131c9d8a v4.11.6
38 38 b5b30547d90d2e088472a70c84878f429ffbf40d v4.12.0
39 39 9072253aa8894d20c00b4a43dc61c2168c1eff94 v4.12.1
40 40 6a517543ea9ef9987d74371bd2a315eb0b232dc9 v4.12.2
41 41 7fc0731b024c3114be87865eda7ab621cc957e32 v4.12.3
42 42 6d531c0b068c6eda62dddceedc9f845ecb6feb6f v4.12.4
43 43 3d6bf2d81b1564830eb5e83396110d2a9a93eb1e v4.13.0
44 44 5468fc89e708bd90e413cd0d54350017abbdbc0e v4.13.1
45 45 610d621550521c314ee97b3d43473ac0bcf06fb8 v4.13.2
46 46 7dc62c090881fb5d03268141e71e0940d7c3295d v4.13.3
47 47 9151328c1c46b72ba6f00d7640d9141e75aa1ca2 v4.14.0
48 48 a47eeac5dfa41fa6779d90452affba4091c3ade8 v4.14.1
49 49 4b34ce0d2c3c10510626b3b65044939bb7a2cddf v4.15.0
50 50 14502561d22e6b70613674cd675ae9a604b7989f v4.15.1
51 51 4aaa40b605b01af78a9f6882eca561c54b525ef0 v4.15.2
52 52 797744642eca86640ed20bef2cd77445780abaec v4.16.0
53 53 6c3452c7c25ed35ff269690929e11960ed6ad7d3 v4.16.1
54 5d8057df561c4b6b81b6401aed7d2f911e6e77f7 v4.16.2
@@ -1,130 +1,131 b''
1 1 .. _rhodecode-release-notes-ref:
2 2
3 3 Release Notes
4 4 =============
5 5
6 6 |RCE| 4.x Versions
7 7 ------------------
8 8
9 9 .. toctree::
10 10 :maxdepth: 1
11 11
12 release-notes-4.16.2.rst
12 13 release-notes-4.16.1.rst
13 14 release-notes-4.16.0.rst
14 15 release-notes-4.15.2.rst
15 16 release-notes-4.15.1.rst
16 17 release-notes-4.15.0.rst
17 18 release-notes-4.14.1.rst
18 19 release-notes-4.14.0.rst
19 20 release-notes-4.13.3.rst
20 21 release-notes-4.13.2.rst
21 22 release-notes-4.13.1.rst
22 23 release-notes-4.13.0.rst
23 24 release-notes-4.12.4.rst
24 25 release-notes-4.12.3.rst
25 26 release-notes-4.12.2.rst
26 27 release-notes-4.12.1.rst
27 28 release-notes-4.12.0.rst
28 29 release-notes-4.11.6.rst
29 30 release-notes-4.11.5.rst
30 31 release-notes-4.11.4.rst
31 32 release-notes-4.11.3.rst
32 33 release-notes-4.11.2.rst
33 34 release-notes-4.11.1.rst
34 35 release-notes-4.11.0.rst
35 36 release-notes-4.10.6.rst
36 37 release-notes-4.10.5.rst
37 38 release-notes-4.10.4.rst
38 39 release-notes-4.10.3.rst
39 40 release-notes-4.10.2.rst
40 41 release-notes-4.10.1.rst
41 42 release-notes-4.10.0.rst
42 43 release-notes-4.9.1.rst
43 44 release-notes-4.9.0.rst
44 45 release-notes-4.8.0.rst
45 46 release-notes-4.7.2.rst
46 47 release-notes-4.7.1.rst
47 48 release-notes-4.7.0.rst
48 49 release-notes-4.6.1.rst
49 50 release-notes-4.6.0.rst
50 51 release-notes-4.5.2.rst
51 52 release-notes-4.5.1.rst
52 53 release-notes-4.5.0.rst
53 54 release-notes-4.4.2.rst
54 55 release-notes-4.4.1.rst
55 56 release-notes-4.4.0.rst
56 57 release-notes-4.3.1.rst
57 58 release-notes-4.3.0.rst
58 59 release-notes-4.2.1.rst
59 60 release-notes-4.2.0.rst
60 61 release-notes-4.1.2.rst
61 62 release-notes-4.1.1.rst
62 63 release-notes-4.1.0.rst
63 64 release-notes-4.0.1.rst
64 65 release-notes-4.0.0.rst
65 66
66 67 |RCE| 3.x Versions
67 68 ------------------
68 69
69 70 .. toctree::
70 71 :maxdepth: 1
71 72
72 73 release-notes-3.8.4.rst
73 74 release-notes-3.8.3.rst
74 75 release-notes-3.8.2.rst
75 76 release-notes-3.8.1.rst
76 77 release-notes-3.8.0.rst
77 78 release-notes-3.7.1.rst
78 79 release-notes-3.7.0.rst
79 80 release-notes-3.6.1.rst
80 81 release-notes-3.6.0.rst
81 82 release-notes-3.5.2.rst
82 83 release-notes-3.5.1.rst
83 84 release-notes-3.5.0.rst
84 85 release-notes-3.4.1.rst
85 86 release-notes-3.4.0.rst
86 87 release-notes-3.3.4.rst
87 88 release-notes-3.3.3.rst
88 89 release-notes-3.3.2.rst
89 90 release-notes-3.3.1.rst
90 91 release-notes-3.3.0.rst
91 92 release-notes-3.2.3.rst
92 93 release-notes-3.2.2.rst
93 94 release-notes-3.2.1.rst
94 95 release-notes-3.2.0.rst
95 96 release-notes-3.1.1.rst
96 97 release-notes-3.1.0.rst
97 98 release-notes-3.0.2.rst
98 99 release-notes-3.0.1.rst
99 100 release-notes-3.0.0.rst
100 101
101 102 |RCE| 2.x Versions
102 103 ------------------
103 104
104 105 .. toctree::
105 106 :maxdepth: 1
106 107
107 108 release-notes-2.2.8.rst
108 109 release-notes-2.2.7.rst
109 110 release-notes-2.2.6.rst
110 111 release-notes-2.2.5.rst
111 112 release-notes-2.2.4.rst
112 113 release-notes-2.2.3.rst
113 114 release-notes-2.2.2.rst
114 115 release-notes-2.2.1.rst
115 116 release-notes-2.2.0.rst
116 117 release-notes-2.1.0.rst
117 118 release-notes-2.0.2.rst
118 119 release-notes-2.0.1.rst
119 120 release-notes-2.0.0.rst
120 121
121 122 |RCE| 1.x Versions
122 123 ------------------
123 124
124 125 .. toctree::
125 126 :maxdepth: 1
126 127
127 128 release-notes-1.7.2.rst
128 129 release-notes-1.7.1.rst
129 130 release-notes-1.7.0.rst
130 131 release-notes-1.6.0.rst
@@ -1,2361 +1,2361 b''
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "alembic" = super.buildPythonPackage {
8 name = "alembic-1.0.8";
8 name = "alembic-1.0.9";
9 9 doCheck = false;
10 10 propagatedBuildInputs = [
11 11 self."sqlalchemy"
12 12 self."mako"
13 13 self."python-editor"
14 14 self."python-dateutil"
15 15 ];
16 16 src = fetchurl {
17 url = "https://files.pythonhosted.org/packages/d6/bb/ec1e21f2e303689ad2170eb47fc67df9ad4199ade6759a99474c4d3535c8/alembic-1.0.8.tar.gz";
18 sha256 = "1s34i1j0dsxbflxligwhnkf37a5hvcshsv8ibkcfdjf03ph42pah";
17 url = "https://files.pythonhosted.org/packages/fc/42/8729e2491fa9b8eae160d1cbb429f61712bfc2d779816488c25cfdabf7b8/alembic-1.0.9.tar.gz";
18 sha256 = "0a88rwp7fp0y8ykczj82ivr4ww1kiflcvb882lgfl9azm8csdfa0";
19 19 };
20 20 meta = {
21 21 license = [ pkgs.lib.licenses.mit ];
22 22 };
23 23 };
24 24 "amqp" = super.buildPythonPackage {
25 25 name = "amqp-2.3.1";
26 26 doCheck = false;
27 27 propagatedBuildInputs = [
28 28 self."vine"
29 29 ];
30 30 src = fetchurl {
31 31 url = "https://files.pythonhosted.org/packages/1b/32/242ff76cd802766f11c89c72f3389b5c8de4bdfbab406137b90c5fae8b05/amqp-2.3.1.tar.gz";
32 32 sha256 = "0wlfnvhmfrn7c8qif2jyvsm63ibdxp02ss564qwrvqfhz0di72s0";
33 33 };
34 34 meta = {
35 35 license = [ pkgs.lib.licenses.bsdOriginal ];
36 36 };
37 37 };
38 38 "appenlight-client" = super.buildPythonPackage {
39 39 name = "appenlight-client-0.6.26";
40 40 doCheck = false;
41 41 propagatedBuildInputs = [
42 42 self."webob"
43 43 self."requests"
44 44 self."six"
45 45 ];
46 46 src = fetchurl {
47 47 url = "https://files.pythonhosted.org/packages/2e/56/418fc10379b96e795ee39a15e69a730c222818af04c3821fa354eaa859ec/appenlight_client-0.6.26.tar.gz";
48 48 sha256 = "0s9xw3sb8s3pk73k78nnq4jil3q4mk6bczfa1fmgfx61kdxl2712";
49 49 };
50 50 meta = {
51 51 license = [ pkgs.lib.licenses.bsdOriginal ];
52 52 };
53 53 };
54 54 "asn1crypto" = super.buildPythonPackage {
55 55 name = "asn1crypto-0.24.0";
56 56 doCheck = false;
57 57 src = fetchurl {
58 58 url = "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz";
59 59 sha256 = "0jaf8rf9dx1lf23xfv2cdd5h52f1qr3w8k63985bc35g3d220p4x";
60 60 };
61 61 meta = {
62 62 license = [ pkgs.lib.licenses.mit ];
63 63 };
64 64 };
65 65 "atomicwrites" = super.buildPythonPackage {
66 66 name = "atomicwrites-1.2.1";
67 67 doCheck = false;
68 68 src = fetchurl {
69 69 url = "https://files.pythonhosted.org/packages/ac/ed/a311712ef6b4355035489f665e63e1a73f9eb371929e3c98e5efd451069e/atomicwrites-1.2.1.tar.gz";
70 70 sha256 = "1vmkbw9j0qammwxbxycrs39gvdg4lc2d4lk98kwf8ag2manyi6pc";
71 71 };
72 72 meta = {
73 73 license = [ pkgs.lib.licenses.mit ];
74 74 };
75 75 };
76 76 "attrs" = super.buildPythonPackage {
77 77 name = "attrs-18.2.0";
78 78 doCheck = false;
79 79 src = fetchurl {
80 80 url = "https://files.pythonhosted.org/packages/0f/9e/26b1d194aab960063b266170e53c39f73ea0d0d3f5ce23313e0ec8ee9bdf/attrs-18.2.0.tar.gz";
81 81 sha256 = "0s9ydh058wmmf5v391pym877x4ahxg45dw6a0w4c7s5wgpigdjqh";
82 82 };
83 83 meta = {
84 84 license = [ pkgs.lib.licenses.mit ];
85 85 };
86 86 };
87 87 "authomatic" = super.buildPythonPackage {
88 88 name = "authomatic-0.1.0.post1";
89 89 doCheck = false;
90 90 src = fetchurl {
91 91 url = "https://code.rhodecode.com/upstream/authomatic/archive/90a9ce60cc405ae8a2bf5c3713acd5d78579a04e.tar.gz?md5=3c68720a1322b25254009518d1ff6801";
92 92 sha256 = "1cgk0a86sbsjbri06gf5z5l4npwkjdxw6fdnwl4vvfmxs2sx9yxw";
93 93 };
94 94 meta = {
95 95 license = [ pkgs.lib.licenses.mit ];
96 96 };
97 97 };
98 98 "babel" = super.buildPythonPackage {
99 99 name = "babel-1.3";
100 100 doCheck = false;
101 101 propagatedBuildInputs = [
102 102 self."pytz"
103 103 ];
104 104 src = fetchurl {
105 105 url = "https://files.pythonhosted.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
106 106 sha256 = "0bnin777lc53nxd1hp3apq410jj5wx92n08h7h4izpl4f4sx00lz";
107 107 };
108 108 meta = {
109 109 license = [ pkgs.lib.licenses.bsdOriginal ];
110 110 };
111 111 };
112 112 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
113 113 name = "backports.shutil-get-terminal-size-1.0.0";
114 114 doCheck = false;
115 115 src = fetchurl {
116 116 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
117 117 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
118 118 };
119 119 meta = {
120 120 license = [ pkgs.lib.licenses.mit ];
121 121 };
122 122 };
123 123 "beaker" = super.buildPythonPackage {
124 124 name = "beaker-1.9.1";
125 125 doCheck = false;
126 126 propagatedBuildInputs = [
127 127 self."funcsigs"
128 128 ];
129 129 src = fetchurl {
130 130 url = "https://files.pythonhosted.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
131 131 sha256 = "08arsn61r255lhz6hcpn2lsiqpg30clla805ysx06wmbhvb6w9rj";
132 132 };
133 133 meta = {
134 134 license = [ pkgs.lib.licenses.bsdOriginal ];
135 135 };
136 136 };
137 137 "beautifulsoup4" = super.buildPythonPackage {
138 138 name = "beautifulsoup4-4.6.3";
139 139 doCheck = false;
140 140 src = fetchurl {
141 141 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
142 142 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
143 143 };
144 144 meta = {
145 145 license = [ pkgs.lib.licenses.mit ];
146 146 };
147 147 };
148 148 "billiard" = super.buildPythonPackage {
149 149 name = "billiard-3.5.0.3";
150 150 doCheck = false;
151 151 src = fetchurl {
152 152 url = "https://files.pythonhosted.org/packages/39/ac/f5571210cca2e4f4532e38aaff242f26c8654c5e2436bee966c230647ccc/billiard-3.5.0.3.tar.gz";
153 153 sha256 = "1riwiiwgb141151md4ykx49qrz749akj5k8g290ji9bsqjyj4yqx";
154 154 };
155 155 meta = {
156 156 license = [ pkgs.lib.licenses.bsdOriginal ];
157 157 };
158 158 };
159 159 "bleach" = super.buildPythonPackage {
160 160 name = "bleach-3.1.0";
161 161 doCheck = false;
162 162 propagatedBuildInputs = [
163 163 self."six"
164 164 self."webencodings"
165 165 ];
166 166 src = fetchurl {
167 167 url = "https://files.pythonhosted.org/packages/78/5a/0df03e8735cd9c75167528299c738702437589b9c71a849489d00ffa82e8/bleach-3.1.0.tar.gz";
168 168 sha256 = "1yhrgrhkln8bd6gn3imj69g1h4xqah9gaz9q26crqr6gmmvpzprz";
169 169 };
170 170 meta = {
171 171 license = [ pkgs.lib.licenses.asl20 ];
172 172 };
173 173 };
174 174 "bumpversion" = super.buildPythonPackage {
175 175 name = "bumpversion-0.5.3";
176 176 doCheck = false;
177 177 src = fetchurl {
178 178 url = "https://files.pythonhosted.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
179 179 sha256 = "0zn7694yfipxg35ikkfh7kvgl2fissha3dnqad2c5bvsvmrwhi37";
180 180 };
181 181 meta = {
182 182 license = [ pkgs.lib.licenses.mit ];
183 183 };
184 184 };
185 185 "celery" = super.buildPythonPackage {
186 186 name = "celery-4.1.1";
187 187 doCheck = false;
188 188 propagatedBuildInputs = [
189 189 self."pytz"
190 190 self."billiard"
191 191 self."kombu"
192 192 ];
193 193 src = fetchurl {
194 194 url = "https://files.pythonhosted.org/packages/e9/cf/a4c0597effca20c57eb586324e41d1180bc8f13a933da41e0646cff69f02/celery-4.1.1.tar.gz";
195 195 sha256 = "1xbir4vw42n2ir9lanhwl7w69zpmj7lbi66fxm2b7pyvkcss7wni";
196 196 };
197 197 meta = {
198 198 license = [ pkgs.lib.licenses.bsdOriginal ];
199 199 };
200 200 };
201 201 "cffi" = super.buildPythonPackage {
202 202 name = "cffi-1.12.2";
203 203 doCheck = false;
204 204 propagatedBuildInputs = [
205 205 self."pycparser"
206 206 ];
207 207 src = fetchurl {
208 208 url = "https://files.pythonhosted.org/packages/64/7c/27367b38e6cc3e1f49f193deb761fe75cda9f95da37b67b422e62281fcac/cffi-1.12.2.tar.gz";
209 209 sha256 = "19qfks2djya8vix95bmg3xzipjb8w9b8mbj4j5k2hqkc8j58f4z1";
210 210 };
211 211 meta = {
212 212 license = [ pkgs.lib.licenses.mit ];
213 213 };
214 214 };
215 215 "chameleon" = super.buildPythonPackage {
216 216 name = "chameleon-2.24";
217 217 doCheck = false;
218 218 src = fetchurl {
219 219 url = "https://files.pythonhosted.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
220 220 sha256 = "0ykqr7syxfa6h9adjfnsv1gdsca2xzm22vmic8859n0f0j09abj5";
221 221 };
222 222 meta = {
223 223 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
224 224 };
225 225 };
226 226 "channelstream" = super.buildPythonPackage {
227 227 name = "channelstream-0.5.2";
228 228 doCheck = false;
229 229 propagatedBuildInputs = [
230 230 self."gevent"
231 231 self."ws4py"
232 232 self."pyramid"
233 233 self."pyramid-jinja2"
234 234 self."itsdangerous"
235 235 self."requests"
236 236 self."six"
237 237 ];
238 238 src = fetchurl {
239 239 url = "https://files.pythonhosted.org/packages/2b/31/29a8e085cf5bf97fa88e7b947adabfc581a18a3463adf77fb6dada34a65f/channelstream-0.5.2.tar.gz";
240 240 sha256 = "1qbm4xdl5hfkja683x546bncg3rqq8qv79w1m1a1wd48cqqzb6rm";
241 241 };
242 242 meta = {
243 243 license = [ pkgs.lib.licenses.bsdOriginal ];
244 244 };
245 245 };
246 246 "click" = super.buildPythonPackage {
247 247 name = "click-7.0";
248 248 doCheck = false;
249 249 src = fetchurl {
250 250 url = "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz";
251 251 sha256 = "1mzjixd4vjbjvzb6vylki9w1556a9qmdh35kzmq6cign46av952v";
252 252 };
253 253 meta = {
254 254 license = [ pkgs.lib.licenses.bsdOriginal ];
255 255 };
256 256 };
257 257 "colander" = super.buildPythonPackage {
258 258 name = "colander-1.7.0";
259 259 doCheck = false;
260 260 propagatedBuildInputs = [
261 261 self."translationstring"
262 262 self."iso8601"
263 263 self."enum34"
264 264 ];
265 265 src = fetchurl {
266 266 url = "https://files.pythonhosted.org/packages/db/e4/74ab06f54211917b41865cafc987ce511e35503de48da9bfe9358a1bdc3e/colander-1.7.0.tar.gz";
267 267 sha256 = "1wl1bqab307lbbcjx81i28s3yl6dlm4rf15fxawkjb6j48x1cn6p";
268 268 };
269 269 meta = {
270 270 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
271 271 };
272 272 };
273 273 "configobj" = super.buildPythonPackage {
274 274 name = "configobj-5.0.6";
275 275 doCheck = false;
276 276 propagatedBuildInputs = [
277 277 self."six"
278 278 ];
279 279 src = fetchurl {
280 280 url = "https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c";
281 281 sha256 = "1hhcxirwvg58grlfr177b3awhbq8hlx1l3lh69ifl1ki7lfd1s1x";
282 282 };
283 283 meta = {
284 284 license = [ pkgs.lib.licenses.bsdOriginal ];
285 285 };
286 286 };
287 287 "configparser" = super.buildPythonPackage {
288 288 name = "configparser-3.7.4";
289 289 doCheck = false;
290 290 src = fetchurl {
291 291 url = "https://files.pythonhosted.org/packages/e2/1c/83fd53748d8245cb9a3399f705c251d3fc0ce7df04450aac1cfc49dd6a0f/configparser-3.7.4.tar.gz";
292 292 sha256 = "0xac32886ihs2xg7w1gppcq2sgin5qsm8lqwijs5xifq9w0x0q6s";
293 293 };
294 294 meta = {
295 295 license = [ pkgs.lib.licenses.mit ];
296 296 };
297 297 };
298 298 "cov-core" = super.buildPythonPackage {
299 299 name = "cov-core-1.15.0";
300 300 doCheck = false;
301 301 propagatedBuildInputs = [
302 302 self."coverage"
303 303 ];
304 304 src = fetchurl {
305 305 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
306 306 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
307 307 };
308 308 meta = {
309 309 license = [ pkgs.lib.licenses.mit ];
310 310 };
311 311 };
312 312 "coverage" = super.buildPythonPackage {
313 name = "coverage-4.5.1";
313 name = "coverage-4.5.3";
314 314 doCheck = false;
315 315 src = fetchurl {
316 url = "https://files.pythonhosted.org/packages/35/fe/e7df7289d717426093c68d156e0fd9117c8f4872b6588e8a8928a0f68424/coverage-4.5.1.tar.gz";
317 sha256 = "1wbrzpxka3xd4nmmkc6q0ir343d91kymwsm8pbmwa0d2a7q4ir2n";
316 url = "https://files.pythonhosted.org/packages/82/70/2280b5b29a0352519bb95ab0ef1ea942d40466ca71c53a2085bdeff7b0eb/coverage-4.5.3.tar.gz";
317 sha256 = "02f6m073qdispn96rc616hg0rnmw1pgqzw3bgxwiwza4zf9hirlx";
318 318 };
319 319 meta = {
320 320 license = [ pkgs.lib.licenses.asl20 ];
321 321 };
322 322 };
323 323 "cryptography" = super.buildPythonPackage {
324 324 name = "cryptography-2.6.1";
325 325 doCheck = false;
326 326 propagatedBuildInputs = [
327 327 self."asn1crypto"
328 328 self."six"
329 329 self."cffi"
330 330 self."enum34"
331 331 self."ipaddress"
332 332 ];
333 333 src = fetchurl {
334 334 url = "https://files.pythonhosted.org/packages/07/ca/bc827c5e55918ad223d59d299fff92f3563476c3b00d0a9157d9c0217449/cryptography-2.6.1.tar.gz";
335 335 sha256 = "19iwz5avym5zl6jrrrkym1rdaa9h61j20ph4cswsqgv8xg5j3j16";
336 336 };
337 337 meta = {
338 338 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
339 339 };
340 340 };
341 341 "cssselect" = super.buildPythonPackage {
342 342 name = "cssselect-1.0.3";
343 343 doCheck = false;
344 344 src = fetchurl {
345 345 url = "https://files.pythonhosted.org/packages/52/ea/f31e1d2e9eb130fda2a631e22eac369dc644e8807345fbed5113f2d6f92b/cssselect-1.0.3.tar.gz";
346 346 sha256 = "011jqa2jhmydhi0iz4v1w3cr540z5zas8g2bw8brdw4s4b2qnv86";
347 347 };
348 348 meta = {
349 349 license = [ pkgs.lib.licenses.bsdOriginal ];
350 350 };
351 351 };
352 352 "decorator" = super.buildPythonPackage {
353 353 name = "decorator-4.1.2";
354 354 doCheck = false;
355 355 src = fetchurl {
356 356 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
357 357 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
358 358 };
359 359 meta = {
360 360 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
361 361 };
362 362 };
363 363 "deform" = super.buildPythonPackage {
364 364 name = "deform-2.0.7";
365 365 doCheck = false;
366 366 propagatedBuildInputs = [
367 367 self."chameleon"
368 368 self."colander"
369 369 self."iso8601"
370 370 self."peppercorn"
371 371 self."translationstring"
372 372 self."zope.deprecation"
373 373 ];
374 374 src = fetchurl {
375 375 url = "https://files.pythonhosted.org/packages/cf/a1/bc234527b8f181de9acd80e796483c00007658d1e32b7de78f1c2e004d9a/deform-2.0.7.tar.gz";
376 376 sha256 = "0jnpi0zr2hjvbmiz6nm33yqv976dn9lf51vhlzqc0i75xcr9rwig";
377 377 };
378 378 meta = {
379 379 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
380 380 };
381 381 };
382 382 "defusedxml" = super.buildPythonPackage {
383 383 name = "defusedxml-0.5.0";
384 384 doCheck = false;
385 385 src = fetchurl {
386 386 url = "https://files.pythonhosted.org/packages/74/ba/4ba4e89e21b5a2e267d80736ea674609a0a33cc4435a6d748ef04f1f9374/defusedxml-0.5.0.tar.gz";
387 387 sha256 = "1x54n0h8hl92vvwyymx883fbqpqjwn2mc8fb383bcg3z9zwz5mr4";
388 388 };
389 389 meta = {
390 390 license = [ pkgs.lib.licenses.psfl ];
391 391 };
392 392 };
393 393 "dm.xmlsec.binding" = super.buildPythonPackage {
394 394 name = "dm.xmlsec.binding-1.3.7";
395 395 doCheck = false;
396 396 propagatedBuildInputs = [
397 397 self."setuptools"
398 398 self."lxml"
399 399 ];
400 400 src = fetchurl {
401 401 url = "https://files.pythonhosted.org/packages/2c/9e/7651982d50252692991acdae614af821fd6c79bc8dcd598ad71d55be8fc7/dm.xmlsec.binding-1.3.7.tar.gz";
402 402 sha256 = "03jjjscx1pz2nc0dwiw9nia02qbz1c6f0f9zkyr8fmvys2n5jkb3";
403 403 };
404 404 meta = {
405 405 license = [ pkgs.lib.licenses.bsdOriginal ];
406 406 };
407 407 };
408 408 "docutils" = super.buildPythonPackage {
409 409 name = "docutils-0.14";
410 410 doCheck = false;
411 411 src = fetchurl {
412 412 url = "https://files.pythonhosted.org/packages/84/f4/5771e41fdf52aabebbadecc9381d11dea0fa34e4759b4071244fa094804c/docutils-0.14.tar.gz";
413 413 sha256 = "0x22fs3pdmr42kvz6c654756wja305qv6cx1zbhwlagvxgr4xrji";
414 414 };
415 415 meta = {
416 416 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
417 417 };
418 418 };
419 419 "dogpile.cache" = super.buildPythonPackage {
420 420 name = "dogpile.cache-0.7.1";
421 421 doCheck = false;
422 422 propagatedBuildInputs = [
423 423 self."decorator"
424 424 ];
425 425 src = fetchurl {
426 426 url = "https://files.pythonhosted.org/packages/84/3e/dbf1cfc5228f1d3dca80ef714db2c5aaec5cd9efaf54d7e3daef6bc48b19/dogpile.cache-0.7.1.tar.gz";
427 427 sha256 = "0caazmrzhnfqb5yrp8myhw61ny637jj69wcngrpbvi31jlcpy6v9";
428 428 };
429 429 meta = {
430 430 license = [ pkgs.lib.licenses.bsdOriginal ];
431 431 };
432 432 };
433 433 "dogpile.core" = super.buildPythonPackage {
434 434 name = "dogpile.core-0.4.1";
435 435 doCheck = false;
436 436 src = fetchurl {
437 437 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
438 438 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
439 439 };
440 440 meta = {
441 441 license = [ pkgs.lib.licenses.bsdOriginal ];
442 442 };
443 443 };
444 444 "ecdsa" = super.buildPythonPackage {
445 445 name = "ecdsa-0.13";
446 446 doCheck = false;
447 447 src = fetchurl {
448 448 url = "https://files.pythonhosted.org/packages/f9/e5/99ebb176e47f150ac115ffeda5fedb6a3dbb3c00c74a59fd84ddf12f5857/ecdsa-0.13.tar.gz";
449 449 sha256 = "1yj31j0asmrx4an9xvsaj2icdmzy6pw0glfpqrrkrphwdpi1xkv4";
450 450 };
451 451 meta = {
452 452 license = [ pkgs.lib.licenses.mit ];
453 453 };
454 454 };
455 455 "elasticsearch" = super.buildPythonPackage {
456 456 name = "elasticsearch-6.3.1";
457 457 doCheck = false;
458 458 propagatedBuildInputs = [
459 459 self."urllib3"
460 460 ];
461 461 src = fetchurl {
462 462 url = "https://files.pythonhosted.org/packages/9d/ce/c4664e8380e379a9402ecfbaf158e56396da90d520daba21cfa840e0eb71/elasticsearch-6.3.1.tar.gz";
463 463 sha256 = "12y93v0yn7a4xmf969239g8gb3l4cdkclfpbk1qc8hx5qkymrnma";
464 464 };
465 465 meta = {
466 466 license = [ pkgs.lib.licenses.asl20 ];
467 467 };
468 468 };
469 469 "elasticsearch-dsl" = super.buildPythonPackage {
470 470 name = "elasticsearch-dsl-6.3.1";
471 471 doCheck = false;
472 472 propagatedBuildInputs = [
473 473 self."six"
474 474 self."python-dateutil"
475 475 self."elasticsearch"
476 476 self."ipaddress"
477 477 ];
478 478 src = fetchurl {
479 479 url = "https://files.pythonhosted.org/packages/4c/0d/1549f50c591db6bb4e66cbcc8d34a6e537c3d89aa426b167c244fd46420a/elasticsearch-dsl-6.3.1.tar.gz";
480 480 sha256 = "1gh8a0shqi105k325hgwb9avrpdjh0mc6mxwfg9ba7g6lssb702z";
481 481 };
482 482 meta = {
483 483 license = [ pkgs.lib.licenses.asl20 ];
484 484 };
485 485 };
486 486 "elasticsearch1" = super.buildPythonPackage {
487 487 name = "elasticsearch1-1.10.0";
488 488 doCheck = false;
489 489 propagatedBuildInputs = [
490 490 self."urllib3"
491 491 ];
492 492 src = fetchurl {
493 493 url = "https://files.pythonhosted.org/packages/a6/eb/73e75f9681fa71e3157b8ee878534235d57f24ee64f0e77f8d995fb57076/elasticsearch1-1.10.0.tar.gz";
494 494 sha256 = "0g89444kd5zwql4vbvyrmi2m6l6dcj6ga98j4hqxyyyz6z20aki2";
495 495 };
496 496 meta = {
497 497 license = [ pkgs.lib.licenses.asl20 ];
498 498 };
499 499 };
500 500 "elasticsearch1-dsl" = super.buildPythonPackage {
501 501 name = "elasticsearch1-dsl-0.0.12";
502 502 doCheck = false;
503 503 propagatedBuildInputs = [
504 504 self."six"
505 505 self."python-dateutil"
506 506 self."elasticsearch1"
507 507 ];
508 508 src = fetchurl {
509 509 url = "https://files.pythonhosted.org/packages/eb/9d/785342775cb10eddc9b8d7457d618a423b4f0b89d8b2b2d1bc27190d71db/elasticsearch1-dsl-0.0.12.tar.gz";
510 510 sha256 = "0ig1ly39v93hba0z975wnhbmzwj28w6w1sqlr2g7cn5spp732bhk";
511 511 };
512 512 meta = {
513 513 license = [ pkgs.lib.licenses.asl20 ];
514 514 };
515 515 };
516 516 "elasticsearch2" = super.buildPythonPackage {
517 517 name = "elasticsearch2-2.5.0";
518 518 doCheck = false;
519 519 propagatedBuildInputs = [
520 520 self."urllib3"
521 521 ];
522 522 src = fetchurl {
523 523 url = "https://files.pythonhosted.org/packages/84/77/63cf63d4ba11d913b5278406f2a37b0712bec6fc85edfb6151a33eaeba25/elasticsearch2-2.5.0.tar.gz";
524 524 sha256 = "0ky0q16lbvz022yv6q3pix7aamf026p1y994537ccjf0p0dxnbxr";
525 525 };
526 526 meta = {
527 527 license = [ pkgs.lib.licenses.asl20 ];
528 528 };
529 529 };
530 530 "entrypoints" = super.buildPythonPackage {
531 531 name = "entrypoints-0.2.2";
532 532 doCheck = false;
533 533 propagatedBuildInputs = [
534 534 self."configparser"
535 535 ];
536 536 src = fetchurl {
537 537 url = "https://code.rhodecode.com/upstream/entrypoints/archive/96e6d645684e1af3d7df5b5272f3fe85a546b233.tar.gz?md5=7db37771aea9ac9fefe093e5d6987313";
538 538 sha256 = "0bihrdp8ahsys437kxdhk52gz6kib8rxjv71i93wkw7594fcaxll";
539 539 };
540 540 meta = {
541 541 license = [ pkgs.lib.licenses.mit ];
542 542 };
543 543 };
544 544 "enum34" = super.buildPythonPackage {
545 545 name = "enum34-1.1.6";
546 546 doCheck = false;
547 547 src = fetchurl {
548 548 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
549 549 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
550 550 };
551 551 meta = {
552 552 license = [ pkgs.lib.licenses.bsdOriginal ];
553 553 };
554 554 };
555 555 "formencode" = super.buildPythonPackage {
556 556 name = "formencode-1.2.4";
557 557 doCheck = false;
558 558 src = fetchurl {
559 559 url = "https://files.pythonhosted.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
560 560 sha256 = "1fgy04sdy4yry5xcjls3x3xy30dqwj58ycnkndim819jx0788w42";
561 561 };
562 562 meta = {
563 563 license = [ pkgs.lib.licenses.psfl ];
564 564 };
565 565 };
566 566 "funcsigs" = super.buildPythonPackage {
567 567 name = "funcsigs-1.0.2";
568 568 doCheck = false;
569 569 src = fetchurl {
570 570 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
571 571 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
572 572 };
573 573 meta = {
574 574 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
575 575 };
576 576 };
577 577 "functools32" = super.buildPythonPackage {
578 578 name = "functools32-3.2.3.post2";
579 579 doCheck = false;
580 580 src = fetchurl {
581 581 url = "https://files.pythonhosted.org/packages/c5/60/6ac26ad05857c601308d8fb9e87fa36d0ebf889423f47c3502ef034365db/functools32-3.2.3-2.tar.gz";
582 582 sha256 = "0v8ya0b58x47wp216n1zamimv4iw57cxz3xxhzix52jkw3xks9gn";
583 583 };
584 584 meta = {
585 585 license = [ pkgs.lib.licenses.psfl ];
586 586 };
587 587 };
588 588 "future" = super.buildPythonPackage {
589 589 name = "future-0.14.3";
590 590 doCheck = false;
591 591 src = fetchurl {
592 592 url = "https://files.pythonhosted.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
593 593 sha256 = "1savk7jx7hal032f522c5ajhh8fra6gmnadrj9adv5qxi18pv1b2";
594 594 };
595 595 meta = {
596 596 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
597 597 };
598 598 };
599 599 "futures" = super.buildPythonPackage {
600 600 name = "futures-3.0.2";
601 601 doCheck = false;
602 602 src = fetchurl {
603 603 url = "https://files.pythonhosted.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
604 604 sha256 = "0mz2pbgxbc2nbib1szifi07whjbfs4r02pv2z390z7p410awjgyw";
605 605 };
606 606 meta = {
607 607 license = [ pkgs.lib.licenses.bsdOriginal ];
608 608 };
609 609 };
610 610 "gevent" = super.buildPythonPackage {
611 611 name = "gevent-1.4.0";
612 612 doCheck = false;
613 613 propagatedBuildInputs = [
614 614 self."greenlet"
615 615 ];
616 616 src = fetchurl {
617 617 url = "https://files.pythonhosted.org/packages/ed/27/6c49b70808f569b66ec7fac2e78f076e9b204db9cf5768740cff3d5a07ae/gevent-1.4.0.tar.gz";
618 618 sha256 = "1lchr4akw2jkm5v4kz7bdm4wv3knkfhbfn9vkkz4s5yrkcxzmdqy";
619 619 };
620 620 meta = {
621 621 license = [ pkgs.lib.licenses.mit ];
622 622 };
623 623 };
624 624 "gnureadline" = super.buildPythonPackage {
625 625 name = "gnureadline-6.3.8";
626 626 doCheck = false;
627 627 src = fetchurl {
628 628 url = "https://files.pythonhosted.org/packages/50/64/86085c823cd78f9df9d8e33dce0baa71618016f8860460b82cf6610e1eb3/gnureadline-6.3.8.tar.gz";
629 629 sha256 = "0ddhj98x2nv45iz4aadk4b9m0b1kpsn1xhcbypn5cd556knhiqjq";
630 630 };
631 631 meta = {
632 632 license = [ { fullName = "GNU General Public License v3 (GPLv3)"; } pkgs.lib.licenses.gpl1 ];
633 633 };
634 634 };
635 635 "gprof2dot" = super.buildPythonPackage {
636 636 name = "gprof2dot-2017.9.19";
637 637 doCheck = false;
638 638 src = fetchurl {
639 639 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
640 640 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
641 641 };
642 642 meta = {
643 643 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
644 644 };
645 645 };
646 646 "greenlet" = super.buildPythonPackage {
647 647 name = "greenlet-0.4.15";
648 648 doCheck = false;
649 649 src = fetchurl {
650 650 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
651 651 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
652 652 };
653 653 meta = {
654 654 license = [ pkgs.lib.licenses.mit ];
655 655 };
656 656 };
657 657 "gunicorn" = super.buildPythonPackage {
658 658 name = "gunicorn-19.9.0";
659 659 doCheck = false;
660 660 src = fetchurl {
661 661 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
662 662 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
663 663 };
664 664 meta = {
665 665 license = [ pkgs.lib.licenses.mit ];
666 666 };
667 667 };
668 668 "hupper" = super.buildPythonPackage {
669 669 name = "hupper-1.6.1";
670 670 doCheck = false;
671 671 src = fetchurl {
672 672 url = "https://files.pythonhosted.org/packages/85/d9/e005d357b11249c5d70ddf5b7adab2e4c0da4e8b0531ff146917a04fe6c0/hupper-1.6.1.tar.gz";
673 673 sha256 = "0d3cvkc8ssgwk54wvhbifj56ry97qi10pfzwfk8vwzzcikbfp3zy";
674 674 };
675 675 meta = {
676 676 license = [ pkgs.lib.licenses.mit ];
677 677 };
678 678 };
679 679 "infrae.cache" = super.buildPythonPackage {
680 680 name = "infrae.cache-1.0.1";
681 681 doCheck = false;
682 682 propagatedBuildInputs = [
683 683 self."beaker"
684 684 self."repoze.lru"
685 685 ];
686 686 src = fetchurl {
687 687 url = "https://files.pythonhosted.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
688 688 sha256 = "1dvqsjn8vw253wz9d1pz17j79mf4bs53dvp2qxck2qdp1am1njw4";
689 689 };
690 690 meta = {
691 691 license = [ pkgs.lib.licenses.zpl21 ];
692 692 };
693 693 };
694 694 "invoke" = super.buildPythonPackage {
695 695 name = "invoke-0.13.0";
696 696 doCheck = false;
697 697 src = fetchurl {
698 698 url = "https://files.pythonhosted.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
699 699 sha256 = "0794vhgxfmkh0vzkkg5cfv1w82g3jc3xr18wim29far9qpx9468s";
700 700 };
701 701 meta = {
702 702 license = [ pkgs.lib.licenses.bsdOriginal ];
703 703 };
704 704 };
705 705 "ipaddress" = super.buildPythonPackage {
706 706 name = "ipaddress-1.0.22";
707 707 doCheck = false;
708 708 src = fetchurl {
709 709 url = "https://files.pythonhosted.org/packages/97/8d/77b8cedcfbf93676148518036c6b1ce7f8e14bf07e95d7fd4ddcb8cc052f/ipaddress-1.0.22.tar.gz";
710 710 sha256 = "0b570bm6xqpjwqis15pvdy6lyvvzfndjvkynilcddjj5x98wfimi";
711 711 };
712 712 meta = {
713 713 license = [ pkgs.lib.licenses.psfl ];
714 714 };
715 715 };
716 716 "ipdb" = super.buildPythonPackage {
717 717 name = "ipdb-0.12";
718 718 doCheck = false;
719 719 propagatedBuildInputs = [
720 720 self."setuptools"
721 721 self."ipython"
722 722 ];
723 723 src = fetchurl {
724 724 url = "https://files.pythonhosted.org/packages/6d/43/c3c2e866a8803e196d6209595020a4a6db1a3c5d07c01455669497ae23d0/ipdb-0.12.tar.gz";
725 725 sha256 = "1khr2n7xfy8hg65kj1bsrjq9g7656pp0ybfa8abpbzpdawji3qnw";
726 726 };
727 727 meta = {
728 728 license = [ pkgs.lib.licenses.bsdOriginal ];
729 729 };
730 730 };
731 731 "ipython" = super.buildPythonPackage {
732 732 name = "ipython-5.1.0";
733 733 doCheck = false;
734 734 propagatedBuildInputs = [
735 735 self."setuptools"
736 736 self."decorator"
737 737 self."pickleshare"
738 738 self."simplegeneric"
739 739 self."traitlets"
740 740 self."prompt-toolkit"
741 741 self."pygments"
742 742 self."pexpect"
743 743 self."backports.shutil-get-terminal-size"
744 744 self."pathlib2"
745 745 self."pexpect"
746 746 ];
747 747 src = fetchurl {
748 748 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
749 749 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
750 750 };
751 751 meta = {
752 752 license = [ pkgs.lib.licenses.bsdOriginal ];
753 753 };
754 754 };
755 755 "ipython-genutils" = super.buildPythonPackage {
756 756 name = "ipython-genutils-0.2.0";
757 757 doCheck = false;
758 758 src = fetchurl {
759 759 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
760 760 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
761 761 };
762 762 meta = {
763 763 license = [ pkgs.lib.licenses.bsdOriginal ];
764 764 };
765 765 };
766 766 "iso8601" = super.buildPythonPackage {
767 767 name = "iso8601-0.1.12";
768 768 doCheck = false;
769 769 src = fetchurl {
770 770 url = "https://files.pythonhosted.org/packages/45/13/3db24895497345fb44c4248c08b16da34a9eb02643cea2754b21b5ed08b0/iso8601-0.1.12.tar.gz";
771 771 sha256 = "10nyvvnrhw2w3p09v1ica4lgj6f4g9j3kkfx17qmraiq3w7b5i29";
772 772 };
773 773 meta = {
774 774 license = [ pkgs.lib.licenses.mit ];
775 775 };
776 776 };
777 777 "isodate" = super.buildPythonPackage {
778 778 name = "isodate-0.6.0";
779 779 doCheck = false;
780 780 propagatedBuildInputs = [
781 781 self."six"
782 782 ];
783 783 src = fetchurl {
784 784 url = "https://files.pythonhosted.org/packages/b1/80/fb8c13a4cd38eb5021dc3741a9e588e4d1de88d895c1910c6fc8a08b7a70/isodate-0.6.0.tar.gz";
785 785 sha256 = "1n7jkz68kk5pwni540pr5zdh99bf6ywydk1p5pdrqisrawylldif";
786 786 };
787 787 meta = {
788 788 license = [ pkgs.lib.licenses.bsdOriginal ];
789 789 };
790 790 };
791 791 "itsdangerous" = super.buildPythonPackage {
792 792 name = "itsdangerous-0.24";
793 793 doCheck = false;
794 794 src = fetchurl {
795 795 url = "https://files.pythonhosted.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
796 796 sha256 = "06856q6x675ly542ig0plbqcyab6ksfzijlyf1hzhgg3sgwgrcyb";
797 797 };
798 798 meta = {
799 799 license = [ pkgs.lib.licenses.bsdOriginal ];
800 800 };
801 801 };
802 802 "jinja2" = super.buildPythonPackage {
803 803 name = "jinja2-2.9.6";
804 804 doCheck = false;
805 805 propagatedBuildInputs = [
806 806 self."markupsafe"
807 807 ];
808 808 src = fetchurl {
809 809 url = "https://files.pythonhosted.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
810 810 sha256 = "1zzrkywhziqffrzks14kzixz7nd4yh2vc0fb04a68vfd2ai03anx";
811 811 };
812 812 meta = {
813 813 license = [ pkgs.lib.licenses.bsdOriginal ];
814 814 };
815 815 };
816 816 "jsonschema" = super.buildPythonPackage {
817 817 name = "jsonschema-2.6.0";
818 818 doCheck = false;
819 819 propagatedBuildInputs = [
820 820 self."functools32"
821 821 ];
822 822 src = fetchurl {
823 823 url = "https://files.pythonhosted.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
824 824 sha256 = "00kf3zmpp9ya4sydffpifn0j0mzm342a2vzh82p6r0vh10cg7xbg";
825 825 };
826 826 meta = {
827 827 license = [ pkgs.lib.licenses.mit ];
828 828 };
829 829 };
830 830 "jupyter-client" = super.buildPythonPackage {
831 831 name = "jupyter-client-5.0.0";
832 832 doCheck = false;
833 833 propagatedBuildInputs = [
834 834 self."traitlets"
835 835 self."jupyter-core"
836 836 self."pyzmq"
837 837 self."python-dateutil"
838 838 ];
839 839 src = fetchurl {
840 840 url = "https://files.pythonhosted.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
841 841 sha256 = "0nxw4rqk4wsjhc87gjqd7pv89cb9dnimcfnmcmp85bmrvv1gjri7";
842 842 };
843 843 meta = {
844 844 license = [ pkgs.lib.licenses.bsdOriginal ];
845 845 };
846 846 };
847 847 "jupyter-core" = super.buildPythonPackage {
848 848 name = "jupyter-core-4.4.0";
849 849 doCheck = false;
850 850 propagatedBuildInputs = [
851 851 self."traitlets"
852 852 ];
853 853 src = fetchurl {
854 854 url = "https://files.pythonhosted.org/packages/b6/2d/2804f4de3a95583f65e5dcb4d7c8c7183124882323758996e867f47e72af/jupyter_core-4.4.0.tar.gz";
855 855 sha256 = "1dy083rarba8prn9f9srxq3c7n7vyql02ycrqq306c40lr57aw5s";
856 856 };
857 857 meta = {
858 858 license = [ pkgs.lib.licenses.bsdOriginal ];
859 859 };
860 860 };
861 861 "kombu" = super.buildPythonPackage {
862 862 name = "kombu-4.2.1";
863 863 doCheck = false;
864 864 propagatedBuildInputs = [
865 865 self."amqp"
866 866 ];
867 867 src = fetchurl {
868 868 url = "https://files.pythonhosted.org/packages/39/9f/556b988833abede4a80dbd18b2bdf4e8ff4486dd482ed45da961347e8ed2/kombu-4.2.1.tar.gz";
869 869 sha256 = "10lh3hncvw67fz0k5vgbx3yh9gjfpqdlia1f13i28cgnc1nfrbc6";
870 870 };
871 871 meta = {
872 872 license = [ pkgs.lib.licenses.bsdOriginal ];
873 873 };
874 874 };
875 875 "lxml" = super.buildPythonPackage {
876 876 name = "lxml-4.2.5";
877 877 doCheck = false;
878 878 src = fetchurl {
879 879 url = "https://files.pythonhosted.org/packages/4b/20/ddf5eb3bd5c57582d2b4652b4bbcf8da301bdfe5d805cb94e805f4d7464d/lxml-4.2.5.tar.gz";
880 880 sha256 = "0zw0y9hs0nflxhl9cs6ipwwh53szi3w2x06wl0k9cylyqac0cwin";
881 881 };
882 882 meta = {
883 883 license = [ pkgs.lib.licenses.bsdOriginal ];
884 884 };
885 885 };
886 886 "mako" = super.buildPythonPackage {
887 887 name = "mako-1.0.7";
888 888 doCheck = false;
889 889 propagatedBuildInputs = [
890 890 self."markupsafe"
891 891 ];
892 892 src = fetchurl {
893 893 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
894 894 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
895 895 };
896 896 meta = {
897 897 license = [ pkgs.lib.licenses.mit ];
898 898 };
899 899 };
900 900 "markdown" = super.buildPythonPackage {
901 901 name = "markdown-2.6.11";
902 902 doCheck = false;
903 903 src = fetchurl {
904 904 url = "https://files.pythonhosted.org/packages/b3/73/fc5c850f44af5889192dff783b7b0d8f3fe8d30b65c8e3f78f8f0265fecf/Markdown-2.6.11.tar.gz";
905 905 sha256 = "108g80ryzykh8bj0i7jfp71510wrcixdi771lf2asyghgyf8cmm8";
906 906 };
907 907 meta = {
908 908 license = [ pkgs.lib.licenses.bsdOriginal ];
909 909 };
910 910 };
911 911 "markupsafe" = super.buildPythonPackage {
912 912 name = "markupsafe-1.1.0";
913 913 doCheck = false;
914 914 src = fetchurl {
915 915 url = "https://files.pythonhosted.org/packages/ac/7e/1b4c2e05809a4414ebce0892fe1e32c14ace86ca7d50c70f00979ca9b3a3/MarkupSafe-1.1.0.tar.gz";
916 916 sha256 = "1lxirjypbdd3l9jl4vliilhfnhy7c7f2vlldqg1b0i74khn375sf";
917 917 };
918 918 meta = {
919 919 license = [ pkgs.lib.licenses.bsdOriginal ];
920 920 };
921 921 };
922 922 "meld3" = super.buildPythonPackage {
923 923 name = "meld3-1.0.2";
924 924 doCheck = false;
925 925 src = fetchurl {
926 926 url = "https://files.pythonhosted.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
927 927 sha256 = "0n4mkwlpsqnmn0dm0wm5hn9nkda0nafl0jdy5sdl5977znh59dzp";
928 928 };
929 929 meta = {
930 930 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
931 931 };
932 932 };
933 933 "mistune" = super.buildPythonPackage {
934 934 name = "mistune-0.8.4";
935 935 doCheck = false;
936 936 src = fetchurl {
937 937 url = "https://files.pythonhosted.org/packages/2d/a4/509f6e7783ddd35482feda27bc7f72e65b5e7dc910eca4ab2164daf9c577/mistune-0.8.4.tar.gz";
938 938 sha256 = "0vkmsh0x480rni51lhyvigfdf06b9247z868pk3bal1wnnfl58sr";
939 939 };
940 940 meta = {
941 941 license = [ pkgs.lib.licenses.bsdOriginal ];
942 942 };
943 943 };
944 944 "mock" = super.buildPythonPackage {
945 945 name = "mock-1.0.1";
946 946 doCheck = false;
947 947 src = fetchurl {
948 948 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
949 949 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
950 950 };
951 951 meta = {
952 952 license = [ pkgs.lib.licenses.bsdOriginal ];
953 953 };
954 954 };
955 955 "more-itertools" = super.buildPythonPackage {
956 956 name = "more-itertools-5.0.0";
957 957 doCheck = false;
958 958 propagatedBuildInputs = [
959 959 self."six"
960 960 ];
961 961 src = fetchurl {
962 962 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
963 963 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
964 964 };
965 965 meta = {
966 966 license = [ pkgs.lib.licenses.mit ];
967 967 };
968 968 };
969 969 "msgpack-python" = super.buildPythonPackage {
970 970 name = "msgpack-python-0.5.6";
971 971 doCheck = false;
972 972 src = fetchurl {
973 973 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
974 974 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
975 975 };
976 976 meta = {
977 977 license = [ pkgs.lib.licenses.asl20 ];
978 978 };
979 979 };
980 980 "mysql-python" = super.buildPythonPackage {
981 981 name = "mysql-python-1.2.5";
982 982 doCheck = false;
983 983 src = fetchurl {
984 984 url = "https://files.pythonhosted.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
985 985 sha256 = "0x0c2jg0bb3pp84njaqiic050qkyd7ymwhfvhipnimg58yv40441";
986 986 };
987 987 meta = {
988 988 license = [ pkgs.lib.licenses.gpl1 ];
989 989 };
990 990 };
991 991 "nbconvert" = super.buildPythonPackage {
992 992 name = "nbconvert-5.3.1";
993 993 doCheck = false;
994 994 propagatedBuildInputs = [
995 995 self."mistune"
996 996 self."jinja2"
997 997 self."pygments"
998 998 self."traitlets"
999 999 self."jupyter-core"
1000 1000 self."nbformat"
1001 1001 self."entrypoints"
1002 1002 self."bleach"
1003 1003 self."pandocfilters"
1004 1004 self."testpath"
1005 1005 ];
1006 1006 src = fetchurl {
1007 1007 url = "https://files.pythonhosted.org/packages/b9/a4/d0a0938ad6f5eeb4dea4e73d255c617ef94b0b2849d51194c9bbdb838412/nbconvert-5.3.1.tar.gz";
1008 1008 sha256 = "1f9dkvpx186xjm4xab0qbph588mncp4vqk3fmxrsnqs43mks9c8j";
1009 1009 };
1010 1010 meta = {
1011 1011 license = [ pkgs.lib.licenses.bsdOriginal ];
1012 1012 };
1013 1013 };
1014 1014 "nbformat" = super.buildPythonPackage {
1015 1015 name = "nbformat-4.4.0";
1016 1016 doCheck = false;
1017 1017 propagatedBuildInputs = [
1018 1018 self."ipython-genutils"
1019 1019 self."traitlets"
1020 1020 self."jsonschema"
1021 1021 self."jupyter-core"
1022 1022 ];
1023 1023 src = fetchurl {
1024 1024 url = "https://files.pythonhosted.org/packages/6e/0e/160754f7ae3e984863f585a3743b0ed1702043a81245907c8fae2d537155/nbformat-4.4.0.tar.gz";
1025 1025 sha256 = "00nlf08h8yc4q73nphfvfhxrcnilaqanb8z0mdy6nxk0vzq4wjgp";
1026 1026 };
1027 1027 meta = {
1028 1028 license = [ pkgs.lib.licenses.bsdOriginal ];
1029 1029 };
1030 1030 };
1031 1031 "packaging" = super.buildPythonPackage {
1032 1032 name = "packaging-15.2";
1033 1033 doCheck = false;
1034 1034 src = fetchurl {
1035 1035 url = "https://files.pythonhosted.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
1036 1036 sha256 = "1zn60w84bxvw6wypffka18ca66pa1k2cfrq3cq8fnsfja5m3k4ng";
1037 1037 };
1038 1038 meta = {
1039 1039 license = [ pkgs.lib.licenses.asl20 ];
1040 1040 };
1041 1041 };
1042 1042 "pandocfilters" = super.buildPythonPackage {
1043 1043 name = "pandocfilters-1.4.2";
1044 1044 doCheck = false;
1045 1045 src = fetchurl {
1046 1046 url = "https://files.pythonhosted.org/packages/4c/ea/236e2584af67bb6df960832731a6e5325fd4441de001767da328c33368ce/pandocfilters-1.4.2.tar.gz";
1047 1047 sha256 = "1a8d9b7s48gmq9zj0pmbyv2sivn5i7m6mybgpkk4jm5vd7hp1pdk";
1048 1048 };
1049 1049 meta = {
1050 1050 license = [ pkgs.lib.licenses.bsdOriginal ];
1051 1051 };
1052 1052 };
1053 1053 "paste" = super.buildPythonPackage {
1054 1054 name = "paste-3.0.8";
1055 1055 doCheck = false;
1056 1056 propagatedBuildInputs = [
1057 1057 self."six"
1058 1058 ];
1059 1059 src = fetchurl {
1060 1060 url = "https://files.pythonhosted.org/packages/66/65/e3acf1663438483c1f6ced0b6c6f3b90da9f0faacb0a6e2aa0f3f9f4b235/Paste-3.0.8.tar.gz";
1061 1061 sha256 = "05w1sh6ky4d7pmdb8nv82n13w22jcn3qsagg5ih3hjmbws9kkwf4";
1062 1062 };
1063 1063 meta = {
1064 1064 license = [ pkgs.lib.licenses.mit ];
1065 1065 };
1066 1066 };
1067 1067 "pastedeploy" = super.buildPythonPackage {
1068 1068 name = "pastedeploy-2.0.1";
1069 1069 doCheck = false;
1070 1070 src = fetchurl {
1071 1071 url = "https://files.pythonhosted.org/packages/19/a0/5623701df7e2478a68a1b685d1a84518024eef994cde7e4da8449a31616f/PasteDeploy-2.0.1.tar.gz";
1072 1072 sha256 = "02imfbbx1mi2h546f3sr37m47dk9qizaqhzzlhx8bkzxa6fzn8yl";
1073 1073 };
1074 1074 meta = {
1075 1075 license = [ pkgs.lib.licenses.mit ];
1076 1076 };
1077 1077 };
1078 1078 "pastescript" = super.buildPythonPackage {
1079 1079 name = "pastescript-3.1.0";
1080 1080 doCheck = false;
1081 1081 propagatedBuildInputs = [
1082 1082 self."paste"
1083 1083 self."pastedeploy"
1084 1084 self."six"
1085 1085 ];
1086 1086 src = fetchurl {
1087 1087 url = "https://files.pythonhosted.org/packages/9e/1d/14db1c283eb21a5d36b6ba1114c13b709629711e64acab653d9994fe346f/PasteScript-3.1.0.tar.gz";
1088 1088 sha256 = "02qcxjjr32ks7a6d4f533wl34ysc7yhwlrfcyqwqbzr52250v4fs";
1089 1089 };
1090 1090 meta = {
1091 1091 license = [ pkgs.lib.licenses.mit ];
1092 1092 };
1093 1093 };
1094 1094 "pathlib2" = super.buildPythonPackage {
1095 1095 name = "pathlib2-2.3.3";
1096 1096 doCheck = false;
1097 1097 propagatedBuildInputs = [
1098 1098 self."six"
1099 1099 self."scandir"
1100 1100 ];
1101 1101 src = fetchurl {
1102 1102 url = "https://files.pythonhosted.org/packages/bf/d7/a2568f4596b75d2c6e2b4094a7e64f620decc7887f69a1f2811931ea15b9/pathlib2-2.3.3.tar.gz";
1103 1103 sha256 = "0hpp92vqqgcd8h92msm9slv161b1q160igjwnkf2ag6cx0c96695";
1104 1104 };
1105 1105 meta = {
1106 1106 license = [ pkgs.lib.licenses.mit ];
1107 1107 };
1108 1108 };
1109 1109 "peppercorn" = super.buildPythonPackage {
1110 1110 name = "peppercorn-0.6";
1111 1111 doCheck = false;
1112 1112 src = fetchurl {
1113 1113 url = "https://files.pythonhosted.org/packages/e4/77/93085de7108cdf1a0b092ff443872a8f9442c736d7ddebdf2f27627935f4/peppercorn-0.6.tar.gz";
1114 1114 sha256 = "1ip4bfwcpwkq9hz2dai14k2cyabvwrnvcvrcmzxmqm04g8fnimwn";
1115 1115 };
1116 1116 meta = {
1117 1117 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1118 1118 };
1119 1119 };
1120 1120 "pexpect" = super.buildPythonPackage {
1121 name = "pexpect-4.6.0";
1121 name = "pexpect-4.7.0";
1122 1122 doCheck = false;
1123 1123 propagatedBuildInputs = [
1124 1124 self."ptyprocess"
1125 1125 ];
1126 1126 src = fetchurl {
1127 url = "https://files.pythonhosted.org/packages/89/43/07d07654ee3e25235d8cea4164cdee0ec39d1fda8e9203156ebe403ffda4/pexpect-4.6.0.tar.gz";
1128 sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
1127 url = "https://files.pythonhosted.org/packages/1c/b1/362a0d4235496cb42c33d1d8732b5e2c607b0129ad5fdd76f5a583b9fcb3/pexpect-4.7.0.tar.gz";
1128 sha256 = "1sv2rri15zwhds85a4kamwh9pj49qcxv7m4miyr4jfpfwv81yb4y";
1129 1129 };
1130 1130 meta = {
1131 1131 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1132 1132 };
1133 1133 };
1134 1134 "pickleshare" = super.buildPythonPackage {
1135 1135 name = "pickleshare-0.7.5";
1136 1136 doCheck = false;
1137 1137 propagatedBuildInputs = [
1138 1138 self."pathlib2"
1139 1139 ];
1140 1140 src = fetchurl {
1141 1141 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
1142 1142 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
1143 1143 };
1144 1144 meta = {
1145 1145 license = [ pkgs.lib.licenses.mit ];
1146 1146 };
1147 1147 };
1148 1148 "plaster" = super.buildPythonPackage {
1149 1149 name = "plaster-1.0";
1150 1150 doCheck = false;
1151 1151 propagatedBuildInputs = [
1152 1152 self."setuptools"
1153 1153 ];
1154 1154 src = fetchurl {
1155 1155 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
1156 1156 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
1157 1157 };
1158 1158 meta = {
1159 1159 license = [ pkgs.lib.licenses.mit ];
1160 1160 };
1161 1161 };
1162 1162 "plaster-pastedeploy" = super.buildPythonPackage {
1163 name = "plaster-pastedeploy-0.6";
1163 name = "plaster-pastedeploy-0.7";
1164 1164 doCheck = false;
1165 1165 propagatedBuildInputs = [
1166 1166 self."pastedeploy"
1167 1167 self."plaster"
1168 1168 ];
1169 1169 src = fetchurl {
1170 url = "https://files.pythonhosted.org/packages/3f/e7/6a6833158d2038ec40085433308a1e164fd1dac595513f6dd556d5669bb8/plaster_pastedeploy-0.6.tar.gz";
1171 sha256 = "1bkggk18f4z2bmsmxyxabvf62znvjwbivzh880419r3ap0616cf2";
1170 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
1171 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
1172 1172 };
1173 1173 meta = {
1174 1174 license = [ pkgs.lib.licenses.mit ];
1175 1175 };
1176 1176 };
1177 1177 "pluggy" = super.buildPythonPackage {
1178 1178 name = "pluggy-0.9.0";
1179 1179 doCheck = false;
1180 1180 src = fetchurl {
1181 1181 url = "https://files.pythonhosted.org/packages/a7/8c/55c629849c64e665258d8976322dfdad171fa2f57117590662d8a67618a4/pluggy-0.9.0.tar.gz";
1182 1182 sha256 = "13yg2q0wgcb4l8lgdvcnzqa8db5lrw3nwn50lxjy1z5jkp7gkv0r";
1183 1183 };
1184 1184 meta = {
1185 1185 license = [ pkgs.lib.licenses.mit ];
1186 1186 };
1187 1187 };
1188 1188 "prompt-toolkit" = super.buildPythonPackage {
1189 name = "prompt-toolkit-1.0.15";
1189 name = "prompt-toolkit-1.0.16";
1190 1190 doCheck = false;
1191 1191 propagatedBuildInputs = [
1192 1192 self."six"
1193 1193 self."wcwidth"
1194 1194 ];
1195 1195 src = fetchurl {
1196 url = "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
1197 sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
1196 url = "https://files.pythonhosted.org/packages/f1/03/bb36771dc9fa7553ac4bdc639a9ecdf6fda0ff4176faf940d97e3c16e41d/prompt_toolkit-1.0.16.tar.gz";
1197 sha256 = "1d65hm6nf0cbq0q0121m60zzy4s1fpg9fn761s1yxf08dridvkn1";
1198 1198 };
1199 1199 meta = {
1200 1200 license = [ pkgs.lib.licenses.bsdOriginal ];
1201 1201 };
1202 1202 };
1203 1203 "psutil" = super.buildPythonPackage {
1204 1204 name = "psutil-5.5.1";
1205 1205 doCheck = false;
1206 1206 src = fetchurl {
1207 1207 url = "https://files.pythonhosted.org/packages/c7/01/7c30b247cdc5ba29623faa5c8cf1f1bbf7e041783c340414b0ed7e067c64/psutil-5.5.1.tar.gz";
1208 1208 sha256 = "045qaqvn6k90bj5bcy259yrwcd2afgznaav3sfhphy9b8ambzkkj";
1209 1209 };
1210 1210 meta = {
1211 1211 license = [ pkgs.lib.licenses.bsdOriginal ];
1212 1212 };
1213 1213 };
1214 1214 "psycopg2" = super.buildPythonPackage {
1215 name = "psycopg2-2.7.7";
1215 name = "psycopg2-2.8.2";
1216 1216 doCheck = false;
1217 1217 src = fetchurl {
1218 url = "https://files.pythonhosted.org/packages/63/54/c039eb0f46f9a9406b59a638415c2012ad7be9b4b97bfddb1f48c280df3a/psycopg2-2.7.7.tar.gz";
1219 sha256 = "0zjbabb4qjx9dm07imhf8y5a9rpa06d5zah80myiimgdi83nslpl";
1218 url = "https://files.pythonhosted.org/packages/23/7e/93c325482c328619870b6cd09370f6dbe1148283daca65115cd63642e60f/psycopg2-2.8.2.tar.gz";
1219 sha256 = "122mn2z3r0zgs8jyq682jjjr6vq5690qmxqf22gj6g41dwdz5b2w";
1220 1220 };
1221 1221 meta = {
1222 1222 license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1223 1223 };
1224 1224 };
1225 1225 "ptyprocess" = super.buildPythonPackage {
1226 1226 name = "ptyprocess-0.6.0";
1227 1227 doCheck = false;
1228 1228 src = fetchurl {
1229 1229 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
1230 1230 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
1231 1231 };
1232 1232 meta = {
1233 1233 license = [ ];
1234 1234 };
1235 1235 };
1236 1236 "py" = super.buildPythonPackage {
1237 1237 name = "py-1.6.0";
1238 1238 doCheck = false;
1239 1239 src = fetchurl {
1240 1240 url = "https://files.pythonhosted.org/packages/4f/38/5f427d1eedae73063ce4da680d2bae72014995f9fdeaa57809df61c968cd/py-1.6.0.tar.gz";
1241 1241 sha256 = "1wcs3zv9wl5m5x7p16avqj2gsrviyb23yvc3pr330isqs0sh98q6";
1242 1242 };
1243 1243 meta = {
1244 1244 license = [ pkgs.lib.licenses.mit ];
1245 1245 };
1246 1246 };
1247 1247 "py-bcrypt" = super.buildPythonPackage {
1248 1248 name = "py-bcrypt-0.4";
1249 1249 doCheck = false;
1250 1250 src = fetchurl {
1251 1251 url = "https://files.pythonhosted.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1252 1252 sha256 = "0y6smdggwi5s72v6p1nn53dg6w05hna3d264cq6kas0lap73p8az";
1253 1253 };
1254 1254 meta = {
1255 1255 license = [ pkgs.lib.licenses.bsdOriginal ];
1256 1256 };
1257 1257 };
1258 1258 "py-gfm" = super.buildPythonPackage {
1259 1259 name = "py-gfm-0.1.4";
1260 1260 doCheck = false;
1261 1261 propagatedBuildInputs = [
1262 1262 self."setuptools"
1263 1263 self."markdown"
1264 1264 ];
1265 1265 src = fetchurl {
1266 1266 url = "https://files.pythonhosted.org/packages/06/ee/004a03a1d92bb386dae44f6dd087db541bc5093374f1637d4d4ae5596cc2/py-gfm-0.1.4.tar.gz";
1267 1267 sha256 = "0zip06g2isivx8fzgqd4n9qzsa22c25jas1rsb7m2rnjg72m0rzg";
1268 1268 };
1269 1269 meta = {
1270 1270 license = [ pkgs.lib.licenses.bsdOriginal ];
1271 1271 };
1272 1272 };
1273 1273 "pyasn1" = super.buildPythonPackage {
1274 1274 name = "pyasn1-0.4.5";
1275 1275 doCheck = false;
1276 1276 src = fetchurl {
1277 1277 url = "https://files.pythonhosted.org/packages/46/60/b7e32f6ff481b8a1f6c8f02b0fd9b693d1c92ddd2efb038ec050d99a7245/pyasn1-0.4.5.tar.gz";
1278 1278 sha256 = "1xqh3jh2nfi2bflk5a0vn59y3pp1vn54f3ksx652sid92gz2096s";
1279 1279 };
1280 1280 meta = {
1281 1281 license = [ pkgs.lib.licenses.bsdOriginal ];
1282 1282 };
1283 1283 };
1284 1284 "pyasn1-modules" = super.buildPythonPackage {
1285 1285 name = "pyasn1-modules-0.2.4";
1286 1286 doCheck = false;
1287 1287 propagatedBuildInputs = [
1288 1288 self."pyasn1"
1289 1289 ];
1290 1290 src = fetchurl {
1291 1291 url = "https://files.pythonhosted.org/packages/bd/a5/ef7bf693e8a8f015386c9167483199f54f8a8ec01d1c737e05524f16e792/pyasn1-modules-0.2.4.tar.gz";
1292 1292 sha256 = "0z3w5dqrrvdplg9ma45j8n23xvyrj9ki8mg4ibqbn7l4qpl90855";
1293 1293 };
1294 1294 meta = {
1295 1295 license = [ pkgs.lib.licenses.bsdOriginal ];
1296 1296 };
1297 1297 };
1298 1298 "pycparser" = super.buildPythonPackage {
1299 1299 name = "pycparser-2.19";
1300 1300 doCheck = false;
1301 1301 src = fetchurl {
1302 1302 url = "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz";
1303 1303 sha256 = "1cr5dcj9628lkz1qlwq3fv97c25363qppkmcayqvd05dpy573259";
1304 1304 };
1305 1305 meta = {
1306 1306 license = [ pkgs.lib.licenses.bsdOriginal ];
1307 1307 };
1308 1308 };
1309 1309 "pycrypto" = super.buildPythonPackage {
1310 1310 name = "pycrypto-2.6.1";
1311 1311 doCheck = false;
1312 1312 src = fetchurl {
1313 1313 url = "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1314 1314 sha256 = "0g0ayql5b9mkjam8hym6zyg6bv77lbh66rv1fyvgqb17kfc1xkpj";
1315 1315 };
1316 1316 meta = {
1317 1317 license = [ pkgs.lib.licenses.publicDomain ];
1318 1318 };
1319 1319 };
1320 1320 "pycurl" = super.buildPythonPackage {
1321 1321 name = "pycurl-7.43.0.2";
1322 1322 doCheck = false;
1323 1323 src = fetchurl {
1324 1324 url = "https://files.pythonhosted.org/packages/e8/e4/0dbb8735407189f00b33d84122b9be52c790c7c3b25286826f4e1bdb7bde/pycurl-7.43.0.2.tar.gz";
1325 1325 sha256 = "1915kb04k1j4y6k1dx1sgnbddxrl9r1n4q928if2lkrdm73xy30g";
1326 1326 };
1327 1327 meta = {
1328 1328 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1329 1329 };
1330 1330 };
1331 1331 "pygments" = super.buildPythonPackage {
1332 1332 name = "pygments-2.3.1";
1333 1333 doCheck = false;
1334 1334 src = fetchurl {
1335 1335 url = "https://files.pythonhosted.org/packages/64/69/413708eaf3a64a6abb8972644e0f20891a55e621c6759e2c3f3891e05d63/Pygments-2.3.1.tar.gz";
1336 1336 sha256 = "0ji87g09jph8jqcvclgb02qvxasdnr9pzvk90rl66d90yqcxmyjz";
1337 1337 };
1338 1338 meta = {
1339 1339 license = [ pkgs.lib.licenses.bsdOriginal ];
1340 1340 };
1341 1341 };
1342 1342 "pymysql" = super.buildPythonPackage {
1343 1343 name = "pymysql-0.8.1";
1344 1344 doCheck = false;
1345 1345 src = fetchurl {
1346 1346 url = "https://files.pythonhosted.org/packages/44/39/6bcb83cae0095a31b6be4511707fdf2009d3e29903a55a0494d3a9a2fac0/PyMySQL-0.8.1.tar.gz";
1347 1347 sha256 = "0a96crz55bw4h6myh833skrli7b0ck89m3x673y2z2ryy7zrpq9l";
1348 1348 };
1349 1349 meta = {
1350 1350 license = [ pkgs.lib.licenses.mit ];
1351 1351 };
1352 1352 };
1353 1353 "pyotp" = super.buildPythonPackage {
1354 1354 name = "pyotp-2.2.7";
1355 1355 doCheck = false;
1356 1356 src = fetchurl {
1357 1357 url = "https://files.pythonhosted.org/packages/b1/ab/477cda97b6ca7baced5106471cb1ac1fe698d1b035983b9f8ee3422989eb/pyotp-2.2.7.tar.gz";
1358 1358 sha256 = "00p69nw431f0s2ilg0hnd77p1l22m06p9rq4f8zfapmavnmzw3xy";
1359 1359 };
1360 1360 meta = {
1361 1361 license = [ pkgs.lib.licenses.mit ];
1362 1362 };
1363 1363 };
1364 1364 "pyparsing" = super.buildPythonPackage {
1365 1365 name = "pyparsing-2.3.0";
1366 1366 doCheck = false;
1367 1367 src = fetchurl {
1368 1368 url = "https://files.pythonhosted.org/packages/d0/09/3e6a5eeb6e04467b737d55f8bba15247ac0876f98fae659e58cd744430c6/pyparsing-2.3.0.tar.gz";
1369 1369 sha256 = "14k5v7n3xqw8kzf42x06bzp184spnlkya2dpjyflax6l3yrallzk";
1370 1370 };
1371 1371 meta = {
1372 1372 license = [ pkgs.lib.licenses.mit ];
1373 1373 };
1374 1374 };
1375 1375 "pyramid" = super.buildPythonPackage {
1376 name = "pyramid-1.10.2";
1376 name = "pyramid-1.10.4";
1377 1377 doCheck = false;
1378 1378 propagatedBuildInputs = [
1379 1379 self."hupper"
1380 1380 self."plaster"
1381 1381 self."plaster-pastedeploy"
1382 1382 self."setuptools"
1383 1383 self."translationstring"
1384 1384 self."venusian"
1385 1385 self."webob"
1386 1386 self."zope.deprecation"
1387 1387 self."zope.interface"
1388 1388 self."repoze.lru"
1389 1389 ];
1390 1390 src = fetchurl {
1391 url = "https://files.pythonhosted.org/packages/bc/0e/73de9b189ff00a963beeedaff90e27b134eedf2806279a1a3fe122fd65b6/pyramid-1.10.2.tar.gz";
1392 sha256 = "0gn6sw6ml67ir150ffivc0ad5hd448p43p9z2bkyp12jh2n9n2p7";
1391 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
1392 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
1393 1393 };
1394 1394 meta = {
1395 1395 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1396 1396 };
1397 1397 };
1398 1398 "pyramid-beaker" = super.buildPythonPackage {
1399 1399 name = "pyramid-beaker-0.8";
1400 1400 doCheck = false;
1401 1401 propagatedBuildInputs = [
1402 1402 self."pyramid"
1403 1403 self."beaker"
1404 1404 ];
1405 1405 src = fetchurl {
1406 1406 url = "https://files.pythonhosted.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1407 1407 sha256 = "0hflx3qkcdml1mwpq53sz46s7jickpfn0zy0ns2c7j445j66bp3p";
1408 1408 };
1409 1409 meta = {
1410 1410 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1411 1411 };
1412 1412 };
1413 1413 "pyramid-debugtoolbar" = super.buildPythonPackage {
1414 1414 name = "pyramid-debugtoolbar-4.5";
1415 1415 doCheck = false;
1416 1416 propagatedBuildInputs = [
1417 1417 self."pyramid"
1418 1418 self."pyramid-mako"
1419 1419 self."repoze.lru"
1420 1420 self."pygments"
1421 1421 self."ipaddress"
1422 1422 ];
1423 1423 src = fetchurl {
1424 1424 url = "https://files.pythonhosted.org/packages/14/28/1f240239af340d19ee271ac62958158c79edb01a44ad8c9885508dd003d2/pyramid_debugtoolbar-4.5.tar.gz";
1425 1425 sha256 = "0x2p3409pnx66n6dx5vc0mk2r1cp1ydr8mp120w44r9pwcngbibl";
1426 1426 };
1427 1427 meta = {
1428 1428 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1429 1429 };
1430 1430 };
1431 1431 "pyramid-jinja2" = super.buildPythonPackage {
1432 1432 name = "pyramid-jinja2-2.7";
1433 1433 doCheck = false;
1434 1434 propagatedBuildInputs = [
1435 1435 self."pyramid"
1436 1436 self."zope.deprecation"
1437 1437 self."jinja2"
1438 1438 self."markupsafe"
1439 1439 ];
1440 1440 src = fetchurl {
1441 1441 url = "https://files.pythonhosted.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
1442 1442 sha256 = "1sz5s0pp5jqhf4w22w9527yz8hgdi4mhr6apd6vw1gm5clghh8aw";
1443 1443 };
1444 1444 meta = {
1445 1445 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1446 1446 };
1447 1447 };
1448 1448 "pyramid-mailer" = super.buildPythonPackage {
1449 1449 name = "pyramid-mailer-0.15.1";
1450 1450 doCheck = false;
1451 1451 propagatedBuildInputs = [
1452 1452 self."pyramid"
1453 1453 self."repoze.sendmail"
1454 1454 self."transaction"
1455 1455 ];
1456 1456 src = fetchurl {
1457 1457 url = "https://files.pythonhosted.org/packages/a0/f2/6febf5459dff4d7e653314d575469ad2e11b9d2af2c3606360e1c67202f2/pyramid_mailer-0.15.1.tar.gz";
1458 1458 sha256 = "16vg8jb203jgb7b0hd6wllfqvp542qh2ry1gjai2m6qpv5agy2pc";
1459 1459 };
1460 1460 meta = {
1461 1461 license = [ pkgs.lib.licenses.bsdOriginal ];
1462 1462 };
1463 1463 };
1464 1464 "pyramid-mako" = super.buildPythonPackage {
1465 1465 name = "pyramid-mako-1.0.2";
1466 1466 doCheck = false;
1467 1467 propagatedBuildInputs = [
1468 1468 self."pyramid"
1469 1469 self."mako"
1470 1470 ];
1471 1471 src = fetchurl {
1472 1472 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1473 1473 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
1474 1474 };
1475 1475 meta = {
1476 1476 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1477 1477 };
1478 1478 };
1479 1479 "pysqlite" = super.buildPythonPackage {
1480 1480 name = "pysqlite-2.8.3";
1481 1481 doCheck = false;
1482 1482 src = fetchurl {
1483 1483 url = "https://files.pythonhosted.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1484 1484 sha256 = "1424gwq9sil2ffmnizk60q36vydkv8rxs6m7xs987kz8cdc37lqp";
1485 1485 };
1486 1486 meta = {
1487 1487 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1488 1488 };
1489 1489 };
1490 1490 "pytest" = super.buildPythonPackage {
1491 1491 name = "pytest-3.8.2";
1492 1492 doCheck = false;
1493 1493 propagatedBuildInputs = [
1494 1494 self."py"
1495 1495 self."six"
1496 1496 self."setuptools"
1497 1497 self."attrs"
1498 1498 self."more-itertools"
1499 1499 self."atomicwrites"
1500 1500 self."pluggy"
1501 1501 self."funcsigs"
1502 1502 self."pathlib2"
1503 1503 ];
1504 1504 src = fetchurl {
1505 1505 url = "https://files.pythonhosted.org/packages/5f/d2/7f77f406ac505abda02ab4afb50d06ebf304f6ea42fca34f8f37529106b2/pytest-3.8.2.tar.gz";
1506 1506 sha256 = "18nrwzn61kph2y6gxwfz9ms68rfvr9d4vcffsxng9p7jk9z18clk";
1507 1507 };
1508 1508 meta = {
1509 1509 license = [ pkgs.lib.licenses.mit ];
1510 1510 };
1511 1511 };
1512 1512 "pytest-cov" = super.buildPythonPackage {
1513 1513 name = "pytest-cov-2.6.0";
1514 1514 doCheck = false;
1515 1515 propagatedBuildInputs = [
1516 1516 self."pytest"
1517 1517 self."coverage"
1518 1518 ];
1519 1519 src = fetchurl {
1520 1520 url = "https://files.pythonhosted.org/packages/d9/e2/58f90a316fbd94dd50bf5c826a23f3f5d079fb3cc448c1e9f0e3c33a3d2a/pytest-cov-2.6.0.tar.gz";
1521 1521 sha256 = "0qnpp9y3ygx4jk4pf5ad71fh2skbvnr6gl54m7rg5qysnx4g0q73";
1522 1522 };
1523 1523 meta = {
1524 1524 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1525 1525 };
1526 1526 };
1527 1527 "pytest-profiling" = super.buildPythonPackage {
1528 1528 name = "pytest-profiling-1.3.0";
1529 1529 doCheck = false;
1530 1530 propagatedBuildInputs = [
1531 1531 self."six"
1532 1532 self."pytest"
1533 1533 self."gprof2dot"
1534 1534 ];
1535 1535 src = fetchurl {
1536 1536 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
1537 1537 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
1538 1538 };
1539 1539 meta = {
1540 1540 license = [ pkgs.lib.licenses.mit ];
1541 1541 };
1542 1542 };
1543 1543 "pytest-runner" = super.buildPythonPackage {
1544 1544 name = "pytest-runner-4.2";
1545 1545 doCheck = false;
1546 1546 src = fetchurl {
1547 1547 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
1548 1548 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
1549 1549 };
1550 1550 meta = {
1551 1551 license = [ pkgs.lib.licenses.mit ];
1552 1552 };
1553 1553 };
1554 1554 "pytest-sugar" = super.buildPythonPackage {
1555 1555 name = "pytest-sugar-0.9.1";
1556 1556 doCheck = false;
1557 1557 propagatedBuildInputs = [
1558 1558 self."pytest"
1559 1559 self."termcolor"
1560 1560 ];
1561 1561 src = fetchurl {
1562 1562 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
1563 1563 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
1564 1564 };
1565 1565 meta = {
1566 1566 license = [ pkgs.lib.licenses.bsdOriginal ];
1567 1567 };
1568 1568 };
1569 1569 "pytest-timeout" = super.buildPythonPackage {
1570 1570 name = "pytest-timeout-1.3.2";
1571 1571 doCheck = false;
1572 1572 propagatedBuildInputs = [
1573 1573 self."pytest"
1574 1574 ];
1575 1575 src = fetchurl {
1576 1576 url = "https://files.pythonhosted.org/packages/8c/3e/1b6a319d12ae7baa3acb7c18ff2c8630a09471a0319d43535c683b4d03eb/pytest-timeout-1.3.2.tar.gz";
1577 1577 sha256 = "09wnmzvnls2mnsdz7x3c3sk2zdp6jl4dryvyj5i8hqz16q2zq5qi";
1578 1578 };
1579 1579 meta = {
1580 1580 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1581 1581 };
1582 1582 };
1583 1583 "python-dateutil" = super.buildPythonPackage {
1584 1584 name = "python-dateutil-2.8.0";
1585 1585 doCheck = false;
1586 1586 propagatedBuildInputs = [
1587 1587 self."six"
1588 1588 ];
1589 1589 src = fetchurl {
1590 1590 url = "https://files.pythonhosted.org/packages/ad/99/5b2e99737edeb28c71bcbec5b5dda19d0d9ef3ca3e92e3e925e7c0bb364c/python-dateutil-2.8.0.tar.gz";
1591 1591 sha256 = "17nsfhy4xdz1khrfxa61vd7pmvd5z0wa3zb6v4gb4kfnykv0b668";
1592 1592 };
1593 1593 meta = {
1594 1594 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.asl20 { fullName = "Dual License"; } ];
1595 1595 };
1596 1596 };
1597 1597 "python-editor" = super.buildPythonPackage {
1598 1598 name = "python-editor-1.0.4";
1599 1599 doCheck = false;
1600 1600 src = fetchurl {
1601 1601 url = "https://files.pythonhosted.org/packages/0a/85/78f4a216d28343a67b7397c99825cff336330893f00601443f7c7b2f2234/python-editor-1.0.4.tar.gz";
1602 1602 sha256 = "0yrjh8w72ivqxi4i7xsg5b1vz15x8fg51xra7c3bgfyxqnyadzai";
1603 1603 };
1604 1604 meta = {
1605 1605 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1606 1606 };
1607 1607 };
1608 1608 "python-ldap" = super.buildPythonPackage {
1609 1609 name = "python-ldap-3.1.0";
1610 1610 doCheck = false;
1611 1611 propagatedBuildInputs = [
1612 1612 self."pyasn1"
1613 1613 self."pyasn1-modules"
1614 1614 ];
1615 1615 src = fetchurl {
1616 1616 url = "https://files.pythonhosted.org/packages/7f/1c/28d721dff2fcd2fef9d55b40df63a00be26ec8a11e8c6fc612ae642f9cfd/python-ldap-3.1.0.tar.gz";
1617 1617 sha256 = "1i97nwfnraylyn0myxlf3vciicrf5h6fymrcff9c00k581wmx5s1";
1618 1618 };
1619 1619 meta = {
1620 1620 license = [ pkgs.lib.licenses.psfl ];
1621 1621 };
1622 1622 };
1623 1623 "python-memcached" = super.buildPythonPackage {
1624 1624 name = "python-memcached-1.59";
1625 1625 doCheck = false;
1626 1626 propagatedBuildInputs = [
1627 1627 self."six"
1628 1628 ];
1629 1629 src = fetchurl {
1630 1630 url = "https://files.pythonhosted.org/packages/90/59/5faf6e3cd8a568dd4f737ddae4f2e54204fd8c51f90bf8df99aca6c22318/python-memcached-1.59.tar.gz";
1631 1631 sha256 = "0kvyapavbirk2x3n1jx4yb9nyigrj1s3x15nm3qhpvhkpqvqdqm2";
1632 1632 };
1633 1633 meta = {
1634 1634 license = [ pkgs.lib.licenses.psfl ];
1635 1635 };
1636 1636 };
1637 1637 "python-pam" = super.buildPythonPackage {
1638 1638 name = "python-pam-1.8.4";
1639 1639 doCheck = false;
1640 1640 src = fetchurl {
1641 1641 url = "https://files.pythonhosted.org/packages/01/16/544d01cae9f28e0292dbd092b6b8b0bf222b528f362ee768a5bed2140111/python-pam-1.8.4.tar.gz";
1642 1642 sha256 = "16whhc0vr7gxsbzvsnq65nq8fs3wwmx755cavm8kkczdkz4djmn8";
1643 1643 };
1644 1644 meta = {
1645 1645 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1646 1646 };
1647 1647 };
1648 1648 "python-saml" = super.buildPythonPackage {
1649 1649 name = "python-saml-2.4.2";
1650 1650 doCheck = false;
1651 1651 propagatedBuildInputs = [
1652 1652 self."dm.xmlsec.binding"
1653 1653 self."isodate"
1654 1654 self."defusedxml"
1655 1655 ];
1656 1656 src = fetchurl {
1657 1657 url = "https://files.pythonhosted.org/packages/79/a8/a6611017e0883102fd5e2b73c9d90691b8134e38247c04ee1531d3dc647c/python-saml-2.4.2.tar.gz";
1658 1658 sha256 = "0dls4hwvf13yg7x5yfjrghbywg8g38vn5vr0rsf70hli3ydbfm43";
1659 1659 };
1660 1660 meta = {
1661 1661 license = [ pkgs.lib.licenses.mit ];
1662 1662 };
1663 1663 };
1664 1664 "pytz" = super.buildPythonPackage {
1665 1665 name = "pytz-2018.4";
1666 1666 doCheck = false;
1667 1667 src = fetchurl {
1668 1668 url = "https://files.pythonhosted.org/packages/10/76/52efda4ef98e7544321fd8d5d512e11739c1df18b0649551aeccfb1c8376/pytz-2018.4.tar.gz";
1669 1669 sha256 = "0jgpqx3kk2rhv81j1izjxvmx8d0x7hzs1857pgqnixic5wq2ar60";
1670 1670 };
1671 1671 meta = {
1672 1672 license = [ pkgs.lib.licenses.mit ];
1673 1673 };
1674 1674 };
1675 1675 "pyzmq" = super.buildPythonPackage {
1676 1676 name = "pyzmq-14.6.0";
1677 1677 doCheck = false;
1678 1678 src = fetchurl {
1679 1679 url = "https://files.pythonhosted.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1680 1680 sha256 = "1frmbjykvhmdg64g7sn20c9fpamrsfxwci1nhhg8q7jgz5pq0ikp";
1681 1681 };
1682 1682 meta = {
1683 1683 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1684 1684 };
1685 1685 };
1686 1686 "redis" = super.buildPythonPackage {
1687 1687 name = "redis-2.10.6";
1688 1688 doCheck = false;
1689 1689 src = fetchurl {
1690 1690 url = "https://files.pythonhosted.org/packages/09/8d/6d34b75326bf96d4139a2ddd8e74b80840f800a0a79f9294399e212cb9a7/redis-2.10.6.tar.gz";
1691 1691 sha256 = "03vcgklykny0g0wpvqmy8p6azi2s078317wgb2xjv5m2rs9sjb52";
1692 1692 };
1693 1693 meta = {
1694 1694 license = [ pkgs.lib.licenses.mit ];
1695 1695 };
1696 1696 };
1697 1697 "repoze.lru" = super.buildPythonPackage {
1698 1698 name = "repoze.lru-0.7";
1699 1699 doCheck = false;
1700 1700 src = fetchurl {
1701 1701 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
1702 1702 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
1703 1703 };
1704 1704 meta = {
1705 1705 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1706 1706 };
1707 1707 };
1708 1708 "repoze.sendmail" = super.buildPythonPackage {
1709 1709 name = "repoze.sendmail-4.4.1";
1710 1710 doCheck = false;
1711 1711 propagatedBuildInputs = [
1712 1712 self."setuptools"
1713 1713 self."zope.interface"
1714 1714 self."transaction"
1715 1715 ];
1716 1716 src = fetchurl {
1717 1717 url = "https://files.pythonhosted.org/packages/12/4e/8ef1fd5c42765d712427b9c391419a77bd48877886d2cbc5e9f23c8cad9b/repoze.sendmail-4.4.1.tar.gz";
1718 1718 sha256 = "096ln02jr2afk7ab9j2czxqv2ryqq7m86ah572nqplx52iws73ks";
1719 1719 };
1720 1720 meta = {
1721 1721 license = [ pkgs.lib.licenses.zpl21 ];
1722 1722 };
1723 1723 };
1724 1724 "requests" = super.buildPythonPackage {
1725 1725 name = "requests-2.9.1";
1726 1726 doCheck = false;
1727 1727 src = fetchurl {
1728 1728 url = "https://files.pythonhosted.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1729 1729 sha256 = "0zsqrzlybf25xscgi7ja4s48y2abf9wvjkn47wh984qgs1fq2xy5";
1730 1730 };
1731 1731 meta = {
1732 1732 license = [ pkgs.lib.licenses.asl20 ];
1733 1733 };
1734 1734 };
1735 1735 "rhodecode-enterprise-ce" = super.buildPythonPackage {
1736 1736 name = "rhodecode-enterprise-ce-4.17.0";
1737 1737 buildInputs = [
1738 1738 self."pytest"
1739 1739 self."py"
1740 1740 self."pytest-cov"
1741 1741 self."pytest-sugar"
1742 1742 self."pytest-runner"
1743 1743 self."pytest-profiling"
1744 1744 self."pytest-timeout"
1745 1745 self."gprof2dot"
1746 1746 self."mock"
1747 1747 self."cov-core"
1748 1748 self."coverage"
1749 1749 self."webtest"
1750 1750 self."beautifulsoup4"
1751 1751 self."configobj"
1752 1752 ];
1753 1753 doCheck = true;
1754 1754 propagatedBuildInputs = [
1755 1755 self."amqp"
1756 1756 self."authomatic"
1757 1757 self."babel"
1758 1758 self."beaker"
1759 1759 self."bleach"
1760 1760 self."celery"
1761 1761 self."channelstream"
1762 1762 self."click"
1763 1763 self."colander"
1764 1764 self."configobj"
1765 1765 self."cssselect"
1766 1766 self."cryptography"
1767 1767 self."decorator"
1768 1768 self."deform"
1769 1769 self."docutils"
1770 1770 self."dogpile.cache"
1771 1771 self."dogpile.core"
1772 1772 self."formencode"
1773 1773 self."future"
1774 1774 self."futures"
1775 1775 self."infrae.cache"
1776 1776 self."iso8601"
1777 1777 self."itsdangerous"
1778 1778 self."kombu"
1779 1779 self."lxml"
1780 1780 self."mako"
1781 1781 self."markdown"
1782 1782 self."markupsafe"
1783 1783 self."msgpack-python"
1784 1784 self."pyotp"
1785 1785 self."packaging"
1786 1786 self."paste"
1787 1787 self."pastedeploy"
1788 1788 self."pastescript"
1789 1789 self."peppercorn"
1790 1790 self."psutil"
1791 1791 self."py-bcrypt"
1792 1792 self."pycurl"
1793 1793 self."pycrypto"
1794 1794 self."pygments"
1795 1795 self."pyparsing"
1796 1796 self."pyramid-beaker"
1797 1797 self."pyramid-debugtoolbar"
1798 1798 self."pyramid-mako"
1799 1799 self."pyramid"
1800 1800 self."pyramid-mailer"
1801 1801 self."python-dateutil"
1802 1802 self."python-ldap"
1803 1803 self."python-memcached"
1804 1804 self."python-pam"
1805 1805 self."python-saml"
1806 1806 self."pytz"
1807 1807 self."tzlocal"
1808 1808 self."pyzmq"
1809 1809 self."py-gfm"
1810 1810 self."redis"
1811 1811 self."repoze.lru"
1812 1812 self."requests"
1813 1813 self."routes"
1814 1814 self."simplejson"
1815 1815 self."six"
1816 1816 self."sqlalchemy"
1817 1817 self."sshpubkeys"
1818 1818 self."subprocess32"
1819 1819 self."supervisor"
1820 1820 self."translationstring"
1821 1821 self."urllib3"
1822 1822 self."urlobject"
1823 1823 self."venusian"
1824 1824 self."weberror"
1825 1825 self."webhelpers2"
1826 1826 self."webhelpers"
1827 1827 self."webob"
1828 1828 self."whoosh"
1829 1829 self."wsgiref"
1830 1830 self."zope.cachedescriptors"
1831 1831 self."zope.deprecation"
1832 1832 self."zope.event"
1833 1833 self."zope.interface"
1834 1834 self."mysql-python"
1835 1835 self."pymysql"
1836 1836 self."pysqlite"
1837 1837 self."psycopg2"
1838 1838 self."nbconvert"
1839 1839 self."nbformat"
1840 1840 self."jupyter-client"
1841 1841 self."alembic"
1842 1842 self."invoke"
1843 1843 self."bumpversion"
1844 1844 self."gevent"
1845 1845 self."greenlet"
1846 1846 self."gunicorn"
1847 1847 self."waitress"
1848 1848 self."ipdb"
1849 1849 self."ipython"
1850 1850 self."rhodecode-tools"
1851 1851 self."appenlight-client"
1852 1852 self."pytest"
1853 1853 self."py"
1854 1854 self."pytest-cov"
1855 1855 self."pytest-sugar"
1856 1856 self."pytest-runner"
1857 1857 self."pytest-profiling"
1858 1858 self."pytest-timeout"
1859 1859 self."gprof2dot"
1860 1860 self."mock"
1861 1861 self."cov-core"
1862 1862 self."coverage"
1863 1863 self."webtest"
1864 1864 self."beautifulsoup4"
1865 1865 ];
1866 1866 src = ./.;
1867 1867 meta = {
1868 1868 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
1869 1869 };
1870 1870 };
1871 1871 "rhodecode-tools" = super.buildPythonPackage {
1872 1872 name = "rhodecode-tools-1.2.1";
1873 1873 doCheck = false;
1874 1874 propagatedBuildInputs = [
1875 1875 self."click"
1876 1876 self."future"
1877 1877 self."six"
1878 1878 self."mako"
1879 1879 self."markupsafe"
1880 1880 self."requests"
1881 1881 self."urllib3"
1882 1882 self."whoosh"
1883 1883 self."elasticsearch"
1884 1884 self."elasticsearch-dsl"
1885 1885 self."elasticsearch2"
1886 1886 self."elasticsearch1-dsl"
1887 1887 ];
1888 1888 src = fetchurl {
1889 1889 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v1.2.1.tar.gz?md5=25bc2f7de1da318e547236d3fb463d28";
1890 1890 sha256 = "1k8l3s4mvshza1zay6dfxprq54fyb5dc85dqdva9wa3f466y0adk";
1891 1891 };
1892 1892 meta = {
1893 1893 license = [ { fullName = "Apache 2.0 and Proprietary"; } ];
1894 1894 };
1895 1895 };
1896 1896 "routes" = super.buildPythonPackage {
1897 1897 name = "routes-2.4.1";
1898 1898 doCheck = false;
1899 1899 propagatedBuildInputs = [
1900 1900 self."six"
1901 1901 self."repoze.lru"
1902 1902 ];
1903 1903 src = fetchurl {
1904 1904 url = "https://files.pythonhosted.org/packages/33/38/ea827837e68d9c7dde4cff7ec122a93c319f0effc08ce92a17095576603f/Routes-2.4.1.tar.gz";
1905 1905 sha256 = "1zamff3m0kc4vyfniyhxpkkcqv1rrgnmh37ykxv34nna1ws47vi6";
1906 1906 };
1907 1907 meta = {
1908 1908 license = [ pkgs.lib.licenses.mit ];
1909 1909 };
1910 1910 };
1911 1911 "scandir" = super.buildPythonPackage {
1912 1912 name = "scandir-1.10.0";
1913 1913 doCheck = false;
1914 1914 src = fetchurl {
1915 1915 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
1916 1916 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
1917 1917 };
1918 1918 meta = {
1919 1919 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
1920 1920 };
1921 1921 };
1922 1922 "setproctitle" = super.buildPythonPackage {
1923 1923 name = "setproctitle-1.1.10";
1924 1924 doCheck = false;
1925 1925 src = fetchurl {
1926 1926 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
1927 1927 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
1928 1928 };
1929 1929 meta = {
1930 1930 license = [ pkgs.lib.licenses.bsdOriginal ];
1931 1931 };
1932 1932 };
1933 1933 "setuptools" = super.buildPythonPackage {
1934 name = "setuptools-40.8.0";
1934 name = "setuptools-41.0.0";
1935 1935 doCheck = false;
1936 1936 src = fetchurl {
1937 url = "https://files.pythonhosted.org/packages/c2/f7/c7b501b783e5a74cf1768bc174ee4fb0a8a6ee5af6afa92274ff964703e0/setuptools-40.8.0.zip";
1938 sha256 = "0k9hifpgahnw2a26w3cr346iy733k6d3nwh3f7g9m13y6f8fqkkf";
1937 url = "https://files.pythonhosted.org/packages/ed/69/c805067de1feedbb98c53174b0f2df44cc05e0e9ee73bb85eebc59e508c6/setuptools-41.0.0.zip";
1938 sha256 = "1cfwy2g23qj3262ivj0b1182lgwz7bqqbka35rkqwypynra05lvr";
1939 1939 };
1940 1940 meta = {
1941 1941 license = [ pkgs.lib.licenses.mit ];
1942 1942 };
1943 1943 };
1944 1944 "simplegeneric" = super.buildPythonPackage {
1945 1945 name = "simplegeneric-0.8.1";
1946 1946 doCheck = false;
1947 1947 src = fetchurl {
1948 1948 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
1949 1949 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
1950 1950 };
1951 1951 meta = {
1952 1952 license = [ pkgs.lib.licenses.zpl21 ];
1953 1953 };
1954 1954 };
1955 1955 "simplejson" = super.buildPythonPackage {
1956 1956 name = "simplejson-3.16.0";
1957 1957 doCheck = false;
1958 1958 src = fetchurl {
1959 1959 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
1960 1960 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
1961 1961 };
1962 1962 meta = {
1963 1963 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
1964 1964 };
1965 1965 };
1966 1966 "six" = super.buildPythonPackage {
1967 1967 name = "six-1.11.0";
1968 1968 doCheck = false;
1969 1969 src = fetchurl {
1970 1970 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
1971 1971 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
1972 1972 };
1973 1973 meta = {
1974 1974 license = [ pkgs.lib.licenses.mit ];
1975 1975 };
1976 1976 };
1977 1977 "sqlalchemy" = super.buildPythonPackage {
1978 1978 name = "sqlalchemy-1.1.18";
1979 1979 doCheck = false;
1980 1980 src = fetchurl {
1981 1981 url = "https://files.pythonhosted.org/packages/cc/4d/96d93ff77cd67aca7618e402191eee3490d8f5f245d6ab7622d35fe504f4/SQLAlchemy-1.1.18.tar.gz";
1982 1982 sha256 = "1ab4ysip6irajfbxl9wy27kv76miaz8h6759hfx92499z4dcf3lb";
1983 1983 };
1984 1984 meta = {
1985 1985 license = [ pkgs.lib.licenses.mit ];
1986 1986 };
1987 1987 };
1988 1988 "sshpubkeys" = super.buildPythonPackage {
1989 1989 name = "sshpubkeys-3.1.0";
1990 1990 doCheck = false;
1991 1991 propagatedBuildInputs = [
1992 1992 self."cryptography"
1993 1993 self."ecdsa"
1994 1994 ];
1995 1995 src = fetchurl {
1996 1996 url = "https://files.pythonhosted.org/packages/00/23/f7508a12007c96861c3da811992f14283d79c819d71a217b3e12d5196649/sshpubkeys-3.1.0.tar.gz";
1997 1997 sha256 = "105g2li04nm1hb15a2y6hm9m9k7fbrkd5l3gy12w3kgcmsf3k25k";
1998 1998 };
1999 1999 meta = {
2000 2000 license = [ pkgs.lib.licenses.bsdOriginal ];
2001 2001 };
2002 2002 };
2003 2003 "subprocess32" = super.buildPythonPackage {
2004 2004 name = "subprocess32-3.5.3";
2005 2005 doCheck = false;
2006 2006 src = fetchurl {
2007 2007 url = "https://files.pythonhosted.org/packages/be/2b/beeba583e9877e64db10b52a96915afc0feabf7144dcbf2a0d0ea68bf73d/subprocess32-3.5.3.tar.gz";
2008 2008 sha256 = "1hr5fan8i719hmlmz73hf8rhq74014w07d8ryg7krvvf6692kj3b";
2009 2009 };
2010 2010 meta = {
2011 2011 license = [ pkgs.lib.licenses.psfl ];
2012 2012 };
2013 2013 };
2014 2014 "supervisor" = super.buildPythonPackage {
2015 name = "supervisor-3.3.5";
2015 name = "supervisor-4.0.1";
2016 2016 doCheck = false;
2017 2017 propagatedBuildInputs = [
2018 2018 self."meld3"
2019 2019 ];
2020 2020 src = fetchurl {
2021 url = "https://files.pythonhosted.org/packages/ba/65/92575a8757ed576beaee59251f64a3287bde82bdc03964b89df9e1d29e1b/supervisor-3.3.5.tar.gz";
2022 sha256 = "1w3ahridzbc6rxfpbyx8lij6pjlcgf2ymzyg53llkjqxalp6sk8v";
2021 url = "https://files.pythonhosted.org/packages/96/ec/f8190beeb0c6d29a30aea10389c11d0164b6ff221931ee84093315ecde6a/supervisor-4.0.1.tar.gz";
2022 sha256 = "10l3z7v6v1fyv7m5zbazzxciwvli2n9a41pxi27p4kixgsfp0s1j";
2023 2023 };
2024 2024 meta = {
2025 2025 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2026 2026 };
2027 2027 };
2028 2028 "tempita" = super.buildPythonPackage {
2029 2029 name = "tempita-0.5.2";
2030 2030 doCheck = false;
2031 2031 src = fetchurl {
2032 2032 url = "https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
2033 2033 sha256 = "177wwq45slfyajd8csy477bmdmzipyw0dm7i85k3akb7m85wzkna";
2034 2034 };
2035 2035 meta = {
2036 2036 license = [ pkgs.lib.licenses.mit ];
2037 2037 };
2038 2038 };
2039 2039 "termcolor" = super.buildPythonPackage {
2040 2040 name = "termcolor-1.1.0";
2041 2041 doCheck = false;
2042 2042 src = fetchurl {
2043 2043 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
2044 2044 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
2045 2045 };
2046 2046 meta = {
2047 2047 license = [ pkgs.lib.licenses.mit ];
2048 2048 };
2049 2049 };
2050 2050 "testpath" = super.buildPythonPackage {
2051 2051 name = "testpath-0.4.2";
2052 2052 doCheck = false;
2053 2053 src = fetchurl {
2054 2054 url = "https://files.pythonhosted.org/packages/06/30/9a7e917066d851d8b4117e85794b5f14516419ea714a8a2681ec6aa8a981/testpath-0.4.2.tar.gz";
2055 2055 sha256 = "1y40hywscnnyb734pnzm55nd8r8kp1072bjxbil83gcd53cv755n";
2056 2056 };
2057 2057 meta = {
2058 2058 license = [ ];
2059 2059 };
2060 2060 };
2061 2061 "traitlets" = super.buildPythonPackage {
2062 2062 name = "traitlets-4.3.2";
2063 2063 doCheck = false;
2064 2064 propagatedBuildInputs = [
2065 2065 self."ipython-genutils"
2066 2066 self."six"
2067 2067 self."decorator"
2068 2068 self."enum34"
2069 2069 ];
2070 2070 src = fetchurl {
2071 2071 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
2072 2072 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
2073 2073 };
2074 2074 meta = {
2075 2075 license = [ pkgs.lib.licenses.bsdOriginal ];
2076 2076 };
2077 2077 };
2078 2078 "transaction" = super.buildPythonPackage {
2079 2079 name = "transaction-2.4.0";
2080 2080 doCheck = false;
2081 2081 propagatedBuildInputs = [
2082 2082 self."zope.interface"
2083 2083 ];
2084 2084 src = fetchurl {
2085 2085 url = "https://files.pythonhosted.org/packages/9d/7d/0e8af0d059e052b9dcf2bb5a08aad20ae3e238746bdd3f8701a60969b363/transaction-2.4.0.tar.gz";
2086 2086 sha256 = "17wz1y524ca07vr03yddy8dv0gbscs06dbdywmllxv5rc725jq3j";
2087 2087 };
2088 2088 meta = {
2089 2089 license = [ pkgs.lib.licenses.zpl21 ];
2090 2090 };
2091 2091 };
2092 2092 "translationstring" = super.buildPythonPackage {
2093 2093 name = "translationstring-1.3";
2094 2094 doCheck = false;
2095 2095 src = fetchurl {
2096 2096 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
2097 2097 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
2098 2098 };
2099 2099 meta = {
2100 2100 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
2101 2101 };
2102 2102 };
2103 2103 "tzlocal" = super.buildPythonPackage {
2104 2104 name = "tzlocal-1.5.1";
2105 2105 doCheck = false;
2106 2106 propagatedBuildInputs = [
2107 2107 self."pytz"
2108 2108 ];
2109 2109 src = fetchurl {
2110 2110 url = "https://files.pythonhosted.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz";
2111 2111 sha256 = "0kiciwiqx0bv0fbc913idxibc4ygg4cb7f8rcpd9ij2shi4bigjf";
2112 2112 };
2113 2113 meta = {
2114 2114 license = [ pkgs.lib.licenses.mit ];
2115 2115 };
2116 2116 };
2117 2117 "urllib3" = super.buildPythonPackage {
2118 2118 name = "urllib3-1.24.1";
2119 2119 doCheck = false;
2120 2120 src = fetchurl {
2121 2121 url = "https://files.pythonhosted.org/packages/b1/53/37d82ab391393565f2f831b8eedbffd57db5a718216f82f1a8b4d381a1c1/urllib3-1.24.1.tar.gz";
2122 2122 sha256 = "08lwd9f3hqznyf32vnzwvp87pchx062nkbgyrf67rwlkgj0jk5fy";
2123 2123 };
2124 2124 meta = {
2125 2125 license = [ pkgs.lib.licenses.mit ];
2126 2126 };
2127 2127 };
2128 2128 "urlobject" = super.buildPythonPackage {
2129 2129 name = "urlobject-2.4.3";
2130 2130 doCheck = false;
2131 2131 src = fetchurl {
2132 2132 url = "https://files.pythonhosted.org/packages/e2/b8/1d0a916f4b34c4618846e6da0e4eeaa8fcb4a2f39e006434fe38acb74b34/URLObject-2.4.3.tar.gz";
2133 2133 sha256 = "1ahc8ficzfvr2avln71immfh4ls0zyv6cdaa5xmkdj5rd87f5cj7";
2134 2134 };
2135 2135 meta = {
2136 2136 license = [ pkgs.lib.licenses.publicDomain ];
2137 2137 };
2138 2138 };
2139 2139 "venusian" = super.buildPythonPackage {
2140 2140 name = "venusian-1.2.0";
2141 2141 doCheck = false;
2142 2142 src = fetchurl {
2143 2143 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
2144 2144 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
2145 2145 };
2146 2146 meta = {
2147 2147 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2148 2148 };
2149 2149 };
2150 2150 "vine" = super.buildPythonPackage {
2151 2151 name = "vine-1.3.0";
2152 2152 doCheck = false;
2153 2153 src = fetchurl {
2154 2154 url = "https://files.pythonhosted.org/packages/1c/e1/79fb8046e607dd6c2ad05c9b8ebac9d0bd31d086a08f02699e96fc5b3046/vine-1.3.0.tar.gz";
2155 2155 sha256 = "11ydsbhl1vabndc2r979dv61s6j2b0giq6dgvryifvq1m7bycghk";
2156 2156 };
2157 2157 meta = {
2158 2158 license = [ pkgs.lib.licenses.bsdOriginal ];
2159 2159 };
2160 2160 };
2161 2161 "waitress" = super.buildPythonPackage {
2162 2162 name = "waitress-1.1.0";
2163 2163 doCheck = false;
2164 2164 src = fetchurl {
2165 2165 url = "https://files.pythonhosted.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
2166 2166 sha256 = "1a85gyji0kajc3p0s1pwwfm06w4wfxjkvvl4rnrz3h164kbd6g6k";
2167 2167 };
2168 2168 meta = {
2169 2169 license = [ pkgs.lib.licenses.zpl21 ];
2170 2170 };
2171 2171 };
2172 2172 "wcwidth" = super.buildPythonPackage {
2173 2173 name = "wcwidth-0.1.7";
2174 2174 doCheck = false;
2175 2175 src = fetchurl {
2176 2176 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
2177 2177 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
2178 2178 };
2179 2179 meta = {
2180 2180 license = [ pkgs.lib.licenses.mit ];
2181 2181 };
2182 2182 };
2183 2183 "webencodings" = super.buildPythonPackage {
2184 2184 name = "webencodings-0.5.1";
2185 2185 doCheck = false;
2186 2186 src = fetchurl {
2187 2187 url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz";
2188 2188 sha256 = "08qrgrc4hrximb2gqnl69g01s93rhf2842jfxdjljc1dbwj1qsmk";
2189 2189 };
2190 2190 meta = {
2191 2191 license = [ pkgs.lib.licenses.bsdOriginal ];
2192 2192 };
2193 2193 };
2194 2194 "weberror" = super.buildPythonPackage {
2195 2195 name = "weberror-0.10.3";
2196 2196 doCheck = false;
2197 2197 propagatedBuildInputs = [
2198 2198 self."webob"
2199 2199 self."tempita"
2200 2200 self."pygments"
2201 2201 self."paste"
2202 2202 ];
2203 2203 src = fetchurl {
2204 2204 url = "https://files.pythonhosted.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
2205 2205 sha256 = "0frg4kvycqpj5bi8asfqfs6bxsr2cvjvb6b56c4d1ai1z57kbjx6";
2206 2206 };
2207 2207 meta = {
2208 2208 license = [ pkgs.lib.licenses.mit ];
2209 2209 };
2210 2210 };
2211 2211 "webhelpers" = super.buildPythonPackage {
2212 2212 name = "webhelpers-1.3";
2213 2213 doCheck = false;
2214 2214 propagatedBuildInputs = [
2215 2215 self."markupsafe"
2216 2216 ];
2217 2217 src = fetchurl {
2218 2218 url = "https://files.pythonhosted.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
2219 2219 sha256 = "10x5i82qdkrvyw18gsybwggfhfpl869siaab89vnndi9x62g51pa";
2220 2220 };
2221 2221 meta = {
2222 2222 license = [ pkgs.lib.licenses.bsdOriginal ];
2223 2223 };
2224 2224 };
2225 2225 "webhelpers2" = super.buildPythonPackage {
2226 2226 name = "webhelpers2-2.0";
2227 2227 doCheck = false;
2228 2228 propagatedBuildInputs = [
2229 2229 self."markupsafe"
2230 2230 self."six"
2231 2231 ];
2232 2232 src = fetchurl {
2233 2233 url = "https://files.pythonhosted.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
2234 2234 sha256 = "0aphva1qmxh83n01p53f5fd43m4srzbnfbz5ajvbx9aj2aipwmcs";
2235 2235 };
2236 2236 meta = {
2237 2237 license = [ pkgs.lib.licenses.mit ];
2238 2238 };
2239 2239 };
2240 2240 "webob" = super.buildPythonPackage {
2241 2241 name = "webob-1.8.5";
2242 2242 doCheck = false;
2243 2243 src = fetchurl {
2244 2244 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
2245 2245 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
2246 2246 };
2247 2247 meta = {
2248 2248 license = [ pkgs.lib.licenses.mit ];
2249 2249 };
2250 2250 };
2251 2251 "webtest" = super.buildPythonPackage {
2252 2252 name = "webtest-2.0.33";
2253 2253 doCheck = false;
2254 2254 propagatedBuildInputs = [
2255 2255 self."six"
2256 2256 self."webob"
2257 2257 self."waitress"
2258 2258 self."beautifulsoup4"
2259 2259 ];
2260 2260 src = fetchurl {
2261 2261 url = "https://files.pythonhosted.org/packages/a8/b0/ffc9413b637dbe26e291429bb0f6ed731e518d0cd03da28524a8fe2e8a8f/WebTest-2.0.33.tar.gz";
2262 2262 sha256 = "1l3z0cwqslsf4rcrhi2gr8kdfh74wn2dw76376i4g9i38gz8wd21";
2263 2263 };
2264 2264 meta = {
2265 2265 license = [ pkgs.lib.licenses.mit ];
2266 2266 };
2267 2267 };
2268 2268 "whoosh" = super.buildPythonPackage {
2269 2269 name = "whoosh-2.7.4";
2270 2270 doCheck = false;
2271 2271 src = fetchurl {
2272 2272 url = "https://files.pythonhosted.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
2273 2273 sha256 = "10qsqdjpbc85fykc1vgcs8xwbgn4l2l52c8d83xf1q59pwyn79bw";
2274 2274 };
2275 2275 meta = {
2276 2276 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
2277 2277 };
2278 2278 };
2279 2279 "ws4py" = super.buildPythonPackage {
2280 2280 name = "ws4py-0.5.1";
2281 2281 doCheck = false;
2282 2282 src = fetchurl {
2283 2283 url = "https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz";
2284 2284 sha256 = "10slbbf2jm4hpr92jx7kh7mhf48sjl01v2w4d8z3f1p0ybbp7l19";
2285 2285 };
2286 2286 meta = {
2287 2287 license = [ pkgs.lib.licenses.bsdOriginal ];
2288 2288 };
2289 2289 };
2290 2290 "wsgiref" = super.buildPythonPackage {
2291 2291 name = "wsgiref-0.1.2";
2292 2292 doCheck = false;
2293 2293 src = fetchurl {
2294 2294 url = "https://files.pythonhosted.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2295 2295 sha256 = "0y8fyjmpq7vwwm4x732w97qbkw78rjwal5409k04cw4m03411rn7";
2296 2296 };
2297 2297 meta = {
2298 2298 license = [ { fullName = "PSF or ZPL"; } ];
2299 2299 };
2300 2300 };
2301 2301 "zope.cachedescriptors" = super.buildPythonPackage {
2302 2302 name = "zope.cachedescriptors-4.3.1";
2303 2303 doCheck = false;
2304 2304 propagatedBuildInputs = [
2305 2305 self."setuptools"
2306 2306 ];
2307 2307 src = fetchurl {
2308 2308 url = "https://files.pythonhosted.org/packages/2f/89/ebe1890cc6d3291ebc935558fa764d5fffe571018dbbee200e9db78762cb/zope.cachedescriptors-4.3.1.tar.gz";
2309 2309 sha256 = "0jhr3m5p74c6r7k8iv0005b8bfsialih9d7zl5vx38rf5xq1lk8z";
2310 2310 };
2311 2311 meta = {
2312 2312 license = [ pkgs.lib.licenses.zpl21 ];
2313 2313 };
2314 2314 };
2315 2315 "zope.deprecation" = super.buildPythonPackage {
2316 2316 name = "zope.deprecation-4.4.0";
2317 2317 doCheck = false;
2318 2318 propagatedBuildInputs = [
2319 2319 self."setuptools"
2320 2320 ];
2321 2321 src = fetchurl {
2322 2322 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
2323 2323 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
2324 2324 };
2325 2325 meta = {
2326 2326 license = [ pkgs.lib.licenses.zpl21 ];
2327 2327 };
2328 2328 };
2329 2329 "zope.event" = super.buildPythonPackage {
2330 2330 name = "zope.event-4.4";
2331 2331 doCheck = false;
2332 2332 propagatedBuildInputs = [
2333 2333 self."setuptools"
2334 2334 ];
2335 2335 src = fetchurl {
2336 2336 url = "https://files.pythonhosted.org/packages/4c/b2/51c0369adcf5be2334280eed230192ab3b03f81f8efda9ddea6f65cc7b32/zope.event-4.4.tar.gz";
2337 2337 sha256 = "1ksbc726av9xacml6jhcfyn828hlhb9xlddpx6fcvnlvmpmpvhk9";
2338 2338 };
2339 2339 meta = {
2340 2340 license = [ pkgs.lib.licenses.zpl21 ];
2341 2341 };
2342 2342 };
2343 2343 "zope.interface" = super.buildPythonPackage {
2344 2344 name = "zope.interface-4.6.0";
2345 2345 doCheck = false;
2346 2346 propagatedBuildInputs = [
2347 2347 self."setuptools"
2348 2348 ];
2349 2349 src = fetchurl {
2350 2350 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
2351 2351 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
2352 2352 };
2353 2353 meta = {
2354 2354 license = [ pkgs.lib.licenses.zpl21 ];
2355 2355 };
2356 2356 };
2357 2357
2358 2358 ### Test requirements
2359 2359
2360 2360
2361 2361 }
@@ -1,124 +1,124 b''
1 1 ## dependencies
2 2
3 3 amqp==2.3.1
4 4 # not released authomatic that has updated some oauth providers
5 5 https://code.rhodecode.com/upstream/authomatic/archive/90a9ce60cc405ae8a2bf5c3713acd5d78579a04e.tar.gz?md5=3c68720a1322b25254009518d1ff6801#egg=authomatic==0.1.0.post1
6 6
7 7 babel==1.3
8 8 beaker==1.9.1
9 9 bleach==3.1.0
10 10 celery==4.1.1
11 11 channelstream==0.5.2
12 12 click==7.0
13 13 colander==1.7.0
14 14 # our custom configobj
15 15 https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c#egg=configobj==5.0.6
16 16 cssselect==1.0.3
17 17 cryptography==2.6.1
18 18 decorator==4.1.2
19 19 deform==2.0.7
20 20 docutils==0.14.0
21 21 dogpile.cache==0.7.1
22 22 dogpile.core==0.4.1
23 23 formencode==1.2.4
24 24 future==0.14.3
25 25 futures==3.0.2
26 26 infrae.cache==1.0.1
27 27 iso8601==0.1.12
28 28 itsdangerous==0.24
29 29 kombu==4.2.1
30 30 lxml==4.2.5
31 31 mako==1.0.7
32 32 markdown==2.6.11
33 33 markupsafe==1.1.0
34 34 msgpack-python==0.5.6
35 35 pyotp==2.2.7
36 36 packaging==15.2
37 37 paste==3.0.8
38 38 pastedeploy==2.0.1
39 39 pastescript==3.1.0
40 40 peppercorn==0.6
41 41 psutil==5.5.1
42 42 py-bcrypt==0.4
43 43 pycurl==7.43.0.2
44 44 pycrypto==2.6.1
45 45 pygments==2.3.1
46 46 pyparsing==2.3.0
47 47 pyramid-beaker==0.8
48 48 pyramid-debugtoolbar==4.5.0
49 49 pyramid-mako==1.0.2
50 pyramid==1.10.2
50 pyramid==1.10.4
51 51 pyramid_mailer==0.15.1
52 52 python-dateutil
53 53 python-ldap==3.1.0
54 54 python-memcached==1.59
55 55 python-pam==1.8.4
56 56 python-saml==2.4.2
57 57 pytz==2018.4
58 58 tzlocal==1.5.1
59 59 pyzmq==14.6.0
60 60 py-gfm==0.1.4
61 61 redis==2.10.6
62 62 repoze.lru==0.7
63 63 requests==2.9.1
64 64 routes==2.4.1
65 65 simplejson==3.16.0
66 66 six==1.11.0
67 67 sqlalchemy==1.1.18
68 68 sshpubkeys==3.1.0
69 69 subprocess32==3.5.3
70 supervisor==3.3.5
70 supervisor==4.0.1
71 71 translationstring==1.3
72 72 urllib3==1.24.1
73 73 urlobject==2.4.3
74 74 venusian==1.2.0
75 75 weberror==0.10.3
76 76 webhelpers2==2.0
77 77 webhelpers==1.3
78 78 webob==1.8.5
79 79 whoosh==2.7.4
80 80 wsgiref==0.1.2
81 81 zope.cachedescriptors==4.3.1
82 82 zope.deprecation==4.4.0
83 83 zope.event==4.4.0
84 84 zope.interface==4.6.0
85 85
86 86 # DB drivers
87 87 mysql-python==1.2.5
88 88 pymysql==0.8.1
89 89 pysqlite==2.8.3
90 psycopg2==2.7.7
90 psycopg2==2.8.2
91 91
92 92 # IPYTHON RENDERING
93 93 # entrypoints backport, pypi version doesn't support egg installs
94 94 https://code.rhodecode.com/upstream/entrypoints/archive/96e6d645684e1af3d7df5b5272f3fe85a546b233.tar.gz?md5=7db37771aea9ac9fefe093e5d6987313#egg=entrypoints==0.2.2.rhodecode-upstream1
95 95 nbconvert==5.3.1
96 96 nbformat==4.4.0
97 97 jupyter_client==5.0.0
98 98
99 99 ## cli tools
100 alembic==1.0.8
100 alembic==1.0.9
101 101 invoke==0.13.0
102 102 bumpversion==0.5.3
103 103
104 104 ## http servers
105 105 gevent==1.4.0
106 106 greenlet==0.4.15
107 107 gunicorn==19.9.0
108 108 waitress==1.1.0
109 109
110 110 ## debug
111 111 ipdb==0.12.0
112 112 ipython==5.1.0
113 113
114 114 ## rhodecode-tools, special case
115 115 https://code.rhodecode.com/rhodecode-tools-ce/archive/v1.2.1.tar.gz?md5=25bc2f7de1da318e547236d3fb463d28#egg=rhodecode-tools==1.2.1
116 116
117 117 ## appenlight
118 118 appenlight-client==0.6.26
119 119
120 120 ## test related requirements
121 121 -r requirements_test.txt
122 122
123 123 ## uncomment to add the debug libraries
124 124 #-r requirements_debug.txt
@@ -1,16 +1,16 b''
1 1 # test related requirements
2 2 pytest==3.8.2
3 3 py==1.6.0
4 4 pytest-cov==2.6.0
5 5 pytest-sugar==0.9.1
6 6 pytest-runner==4.2.0
7 7 pytest-profiling==1.3.0
8 8 pytest-timeout==1.3.2
9 9 gprof2dot==2017.9.19
10 10
11 11 mock==1.0.1
12 12 cov-core==1.15.0
13 coverage==4.5.1
13 coverage==4.5.3
14 14
15 15 webtest==2.0.33
16 16 beautifulsoup4==4.6.3
@@ -1,57 +1,57 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import sys
23 23 import platform
24 24
25 25 VERSION = tuple(open(os.path.join(
26 26 os.path.dirname(__file__), 'VERSION')).read().split('.'))
27 27
28 28 BACKENDS = {
29 29 'hg': 'Mercurial repository',
30 30 'git': 'Git repository',
31 31 'svn': 'Subversion repository',
32 32 }
33 33
34 34 CELERY_ENABLED = False
35 35 CELERY_EAGER = False
36 36
37 37 # link to config for pyramid
38 38 CONFIG = {}
39 39
40 40 # Populated with the settings dictionary from application init in
41 41 # rhodecode.conf.environment.load_pyramid_environment
42 42 PYRAMID_SETTINGS = {}
43 43
44 44 # Linked module for extensions
45 45 EXTENSIONS = {}
46 46
47 47 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
48 __dbversion__ = 95 # defines current db version for migrations
48 __dbversion__ = 97 # defines current db version for migrations
49 49 __platform__ = platform.system()
50 50 __license__ = 'AGPLv3, and Commercial License'
51 51 __author__ = 'RhodeCode GmbH'
52 52 __url__ = 'https://code.rhodecode.com'
53 53
54 54 is_windows = __platform__ in ['Windows']
55 55 is_unix = not is_windows
56 56 is_test = False
57 57 disable_error_handler = False
@@ -1,446 +1,450 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 from rhodecode.apps._base import ADMIN_PREFIX
23 23
24 24
25 25 def admin_routes(config):
26 26 """
27 27 Admin prefixed routes
28 28 """
29 29
30 30 config.add_route(
31 31 name='admin_audit_logs',
32 32 pattern='/audit_logs')
33 33
34 34 config.add_route(
35 35 name='admin_audit_log_entry',
36 36 pattern='/audit_logs/{audit_log_id}')
37 37
38 38 config.add_route(
39 39 name='pull_requests_global_0', # backward compat
40 40 pattern='/pull_requests/{pull_request_id:\d+}')
41 41 config.add_route(
42 42 name='pull_requests_global_1', # backward compat
43 43 pattern='/pull-requests/{pull_request_id:\d+}')
44 44 config.add_route(
45 45 name='pull_requests_global',
46 46 pattern='/pull-request/{pull_request_id:\d+}')
47 47
48 48 config.add_route(
49 49 name='admin_settings_open_source',
50 50 pattern='/settings/open_source')
51 51 config.add_route(
52 52 name='admin_settings_vcs_svn_generate_cfg',
53 53 pattern='/settings/vcs/svn_generate_cfg')
54 54
55 55 config.add_route(
56 56 name='admin_settings_system',
57 57 pattern='/settings/system')
58 58 config.add_route(
59 59 name='admin_settings_system_update',
60 60 pattern='/settings/system/updates')
61 61
62 62 config.add_route(
63 63 name='admin_settings_exception_tracker',
64 64 pattern='/settings/exceptions')
65 65 config.add_route(
66 66 name='admin_settings_exception_tracker_delete_all',
67 67 pattern='/settings/exceptions/delete')
68 68 config.add_route(
69 69 name='admin_settings_exception_tracker_show',
70 70 pattern='/settings/exceptions/{exception_id}')
71 71 config.add_route(
72 72 name='admin_settings_exception_tracker_delete',
73 73 pattern='/settings/exceptions/{exception_id}/delete')
74 74
75 75 config.add_route(
76 76 name='admin_settings_sessions',
77 77 pattern='/settings/sessions')
78 78 config.add_route(
79 79 name='admin_settings_sessions_cleanup',
80 80 pattern='/settings/sessions/cleanup')
81 81
82 82 config.add_route(
83 83 name='admin_settings_process_management',
84 84 pattern='/settings/process_management')
85 85 config.add_route(
86 86 name='admin_settings_process_management_data',
87 87 pattern='/settings/process_management/data')
88 88 config.add_route(
89 89 name='admin_settings_process_management_signal',
90 90 pattern='/settings/process_management/signal')
91 91 config.add_route(
92 92 name='admin_settings_process_management_master_signal',
93 93 pattern='/settings/process_management/master_signal')
94 94
95 95 # default settings
96 96 config.add_route(
97 97 name='admin_defaults_repositories',
98 98 pattern='/defaults/repositories')
99 99 config.add_route(
100 100 name='admin_defaults_repositories_update',
101 101 pattern='/defaults/repositories/update')
102 102
103 103 # admin settings
104 104
105 105 config.add_route(
106 106 name='admin_settings',
107 107 pattern='/settings')
108 108 config.add_route(
109 109 name='admin_settings_update',
110 110 pattern='/settings/update')
111 111
112 112 config.add_route(
113 113 name='admin_settings_global',
114 114 pattern='/settings/global')
115 115 config.add_route(
116 116 name='admin_settings_global_update',
117 117 pattern='/settings/global/update')
118 118
119 119 config.add_route(
120 120 name='admin_settings_vcs',
121 121 pattern='/settings/vcs')
122 122 config.add_route(
123 123 name='admin_settings_vcs_update',
124 124 pattern='/settings/vcs/update')
125 125 config.add_route(
126 126 name='admin_settings_vcs_svn_pattern_delete',
127 127 pattern='/settings/vcs/svn_pattern_delete')
128 128
129 129 config.add_route(
130 130 name='admin_settings_mapping',
131 131 pattern='/settings/mapping')
132 132 config.add_route(
133 133 name='admin_settings_mapping_update',
134 134 pattern='/settings/mapping/update')
135 135
136 136 config.add_route(
137 137 name='admin_settings_visual',
138 138 pattern='/settings/visual')
139 139 config.add_route(
140 140 name='admin_settings_visual_update',
141 141 pattern='/settings/visual/update')
142 142
143 143
144 144 config.add_route(
145 145 name='admin_settings_issuetracker',
146 146 pattern='/settings/issue-tracker')
147 147 config.add_route(
148 148 name='admin_settings_issuetracker_update',
149 149 pattern='/settings/issue-tracker/update')
150 150 config.add_route(
151 151 name='admin_settings_issuetracker_test',
152 152 pattern='/settings/issue-tracker/test')
153 153 config.add_route(
154 154 name='admin_settings_issuetracker_delete',
155 155 pattern='/settings/issue-tracker/delete')
156 156
157 157 config.add_route(
158 158 name='admin_settings_email',
159 159 pattern='/settings/email')
160 160 config.add_route(
161 161 name='admin_settings_email_update',
162 162 pattern='/settings/email/update')
163 163
164 164 config.add_route(
165 165 name='admin_settings_hooks',
166 166 pattern='/settings/hooks')
167 167 config.add_route(
168 168 name='admin_settings_hooks_update',
169 169 pattern='/settings/hooks/update')
170 170 config.add_route(
171 171 name='admin_settings_hooks_delete',
172 172 pattern='/settings/hooks/delete')
173 173
174 174 config.add_route(
175 175 name='admin_settings_search',
176 176 pattern='/settings/search')
177 177
178 178 config.add_route(
179 179 name='admin_settings_labs',
180 180 pattern='/settings/labs')
181 181 config.add_route(
182 182 name='admin_settings_labs_update',
183 183 pattern='/settings/labs/update')
184 184
185 185 # Automation EE feature
186 186 config.add_route(
187 187 'admin_settings_automation',
188 188 pattern=ADMIN_PREFIX + '/settings/automation')
189 189
190 190 # global permissions
191 191
192 192 config.add_route(
193 193 name='admin_permissions_application',
194 194 pattern='/permissions/application')
195 195 config.add_route(
196 196 name='admin_permissions_application_update',
197 197 pattern='/permissions/application/update')
198 198
199 199 config.add_route(
200 200 name='admin_permissions_global',
201 201 pattern='/permissions/global')
202 202 config.add_route(
203 203 name='admin_permissions_global_update',
204 204 pattern='/permissions/global/update')
205 205
206 206 config.add_route(
207 207 name='admin_permissions_object',
208 208 pattern='/permissions/object')
209 209 config.add_route(
210 210 name='admin_permissions_object_update',
211 211 pattern='/permissions/object/update')
212 212
213 213 # Branch perms EE feature
214 214 config.add_route(
215 215 name='admin_permissions_branch',
216 216 pattern='/permissions/branch')
217 217
218 218 config.add_route(
219 219 name='admin_permissions_ips',
220 220 pattern='/permissions/ips')
221 221
222 222 config.add_route(
223 223 name='admin_permissions_overview',
224 224 pattern='/permissions/overview')
225 225
226 226 config.add_route(
227 227 name='admin_permissions_auth_token_access',
228 228 pattern='/permissions/auth_token_access')
229 229
230 230 config.add_route(
231 231 name='admin_permissions_ssh_keys',
232 232 pattern='/permissions/ssh_keys')
233 233 config.add_route(
234 234 name='admin_permissions_ssh_keys_data',
235 235 pattern='/permissions/ssh_keys/data')
236 236 config.add_route(
237 237 name='admin_permissions_ssh_keys_update',
238 238 pattern='/permissions/ssh_keys/update')
239 239
240 240 # users admin
241 241 config.add_route(
242 242 name='users',
243 243 pattern='/users')
244 244
245 245 config.add_route(
246 246 name='users_data',
247 247 pattern='/users_data')
248 248
249 249 config.add_route(
250 250 name='users_create',
251 251 pattern='/users/create')
252 252
253 253 config.add_route(
254 254 name='users_new',
255 255 pattern='/users/new')
256 256
257 257 # user management
258 258 config.add_route(
259 259 name='user_edit',
260 260 pattern='/users/{user_id:\d+}/edit',
261 261 user_route=True)
262 262 config.add_route(
263 263 name='user_edit_advanced',
264 264 pattern='/users/{user_id:\d+}/edit/advanced',
265 265 user_route=True)
266 266 config.add_route(
267 267 name='user_edit_global_perms',
268 268 pattern='/users/{user_id:\d+}/edit/global_permissions',
269 269 user_route=True)
270 270 config.add_route(
271 271 name='user_edit_global_perms_update',
272 272 pattern='/users/{user_id:\d+}/edit/global_permissions/update',
273 273 user_route=True)
274 274 config.add_route(
275 275 name='user_update',
276 276 pattern='/users/{user_id:\d+}/update',
277 277 user_route=True)
278 278 config.add_route(
279 279 name='user_delete',
280 280 pattern='/users/{user_id:\d+}/delete',
281 281 user_route=True)
282 282 config.add_route(
283 283 name='user_enable_force_password_reset',
284 284 pattern='/users/{user_id:\d+}/password_reset_enable',
285 285 user_route=True)
286 286 config.add_route(
287 287 name='user_disable_force_password_reset',
288 288 pattern='/users/{user_id:\d+}/password_reset_disable',
289 289 user_route=True)
290 290 config.add_route(
291 291 name='user_create_personal_repo_group',
292 292 pattern='/users/{user_id:\d+}/create_repo_group',
293 293 user_route=True)
294 294
295 295 # user auth tokens
296 296 config.add_route(
297 297 name='edit_user_auth_tokens',
298 298 pattern='/users/{user_id:\d+}/edit/auth_tokens',
299 299 user_route=True)
300 300 config.add_route(
301 301 name='edit_user_auth_tokens_add',
302 302 pattern='/users/{user_id:\d+}/edit/auth_tokens/new',
303 303 user_route=True)
304 304 config.add_route(
305 305 name='edit_user_auth_tokens_delete',
306 306 pattern='/users/{user_id:\d+}/edit/auth_tokens/delete',
307 307 user_route=True)
308 308
309 309 # user ssh keys
310 310 config.add_route(
311 311 name='edit_user_ssh_keys',
312 312 pattern='/users/{user_id:\d+}/edit/ssh_keys',
313 313 user_route=True)
314 314 config.add_route(
315 315 name='edit_user_ssh_keys_generate_keypair',
316 316 pattern='/users/{user_id:\d+}/edit/ssh_keys/generate',
317 317 user_route=True)
318 318 config.add_route(
319 319 name='edit_user_ssh_keys_add',
320 320 pattern='/users/{user_id:\d+}/edit/ssh_keys/new',
321 321 user_route=True)
322 322 config.add_route(
323 323 name='edit_user_ssh_keys_delete',
324 324 pattern='/users/{user_id:\d+}/edit/ssh_keys/delete',
325 325 user_route=True)
326 326
327 327 # user emails
328 328 config.add_route(
329 329 name='edit_user_emails',
330 330 pattern='/users/{user_id:\d+}/edit/emails',
331 331 user_route=True)
332 332 config.add_route(
333 333 name='edit_user_emails_add',
334 334 pattern='/users/{user_id:\d+}/edit/emails/new',
335 335 user_route=True)
336 336 config.add_route(
337 337 name='edit_user_emails_delete',
338 338 pattern='/users/{user_id:\d+}/edit/emails/delete',
339 339 user_route=True)
340 340
341 341 # user IPs
342 342 config.add_route(
343 343 name='edit_user_ips',
344 344 pattern='/users/{user_id:\d+}/edit/ips',
345 345 user_route=True)
346 346 config.add_route(
347 347 name='edit_user_ips_add',
348 348 pattern='/users/{user_id:\d+}/edit/ips/new',
349 349 user_route_with_default=True) # enabled for default user too
350 350 config.add_route(
351 351 name='edit_user_ips_delete',
352 352 pattern='/users/{user_id:\d+}/edit/ips/delete',
353 353 user_route_with_default=True) # enabled for default user too
354 354
355 355 # user perms
356 356 config.add_route(
357 357 name='edit_user_perms_summary',
358 358 pattern='/users/{user_id:\d+}/edit/permissions_summary',
359 359 user_route=True)
360 360 config.add_route(
361 361 name='edit_user_perms_summary_json',
362 362 pattern='/users/{user_id:\d+}/edit/permissions_summary/json',
363 363 user_route=True)
364 364
365 365 # user user groups management
366 366 config.add_route(
367 367 name='edit_user_groups_management',
368 368 pattern='/users/{user_id:\d+}/edit/groups_management',
369 369 user_route=True)
370 370
371 371 config.add_route(
372 372 name='edit_user_groups_management_updates',
373 373 pattern='/users/{user_id:\d+}/edit/edit_user_groups_management/updates',
374 374 user_route=True)
375 375
376 376 # user audit logs
377 377 config.add_route(
378 378 name='edit_user_audit_logs',
379 379 pattern='/users/{user_id:\d+}/edit/audit', user_route=True)
380 380
381 381 # user caches
382 382 config.add_route(
383 383 name='edit_user_caches',
384 384 pattern='/users/{user_id:\d+}/edit/caches',
385 385 user_route=True)
386 386 config.add_route(
387 387 name='edit_user_caches_update',
388 388 pattern='/users/{user_id:\d+}/edit/caches/update',
389 389 user_route=True)
390 390
391 391 # user-groups admin
392 392 config.add_route(
393 393 name='user_groups',
394 394 pattern='/user_groups')
395 395
396 396 config.add_route(
397 397 name='user_groups_data',
398 398 pattern='/user_groups_data')
399 399
400 400 config.add_route(
401 401 name='user_groups_new',
402 402 pattern='/user_groups/new')
403 403
404 404 config.add_route(
405 405 name='user_groups_create',
406 406 pattern='/user_groups/create')
407 407
408 408 # repos admin
409 409 config.add_route(
410 410 name='repos',
411 411 pattern='/repos')
412 412
413 413 config.add_route(
414 414 name='repo_new',
415 415 pattern='/repos/new')
416 416
417 417 config.add_route(
418 418 name='repo_create',
419 419 pattern='/repos/create')
420 420
421 421 # repo groups admin
422 422 config.add_route(
423 423 name='repo_groups',
424 424 pattern='/repo_groups')
425 425
426 426 config.add_route(
427 name='repo_groups_data',
428 pattern='/repo_groups_data')
429
430 config.add_route(
427 431 name='repo_group_new',
428 432 pattern='/repo_group/new')
429 433
430 434 config.add_route(
431 435 name='repo_group_create',
432 436 pattern='/repo_group/create')
433 437
434 438
435 439 def includeme(config):
436 440 from rhodecode.apps._base.navigation import includeme as nav_includeme
437 441
438 442 # Create admin navigation registry and add it to the pyramid registry.
439 443 nav_includeme(config)
440 444
441 445 # main admin routes
442 446 config.add_route(name='admin_home', pattern=ADMIN_PREFIX)
443 447 config.include(admin_routes, route_prefix=ADMIN_PREFIX)
444 448
445 449 # Scan module for configuration decorators.
446 450 config.scan('.views', ignore='.tests')
@@ -1,176 +1,194 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import pytest
23 23
24 24 from rhodecode.apps._base import ADMIN_PREFIX
25 25 from rhodecode.lib import helpers as h
26 from rhodecode.model.db import Repository, UserRepoToPerm, User
26 from rhodecode.model.db import Repository, UserRepoToPerm, User, RepoGroup
27 27 from rhodecode.model.meta import Session
28 28 from rhodecode.model.repo_group import RepoGroupModel
29 29 from rhodecode.tests import (
30 assert_session_flash, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH, TestController)
30 assert_session_flash, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH)
31 31 from rhodecode.tests.fixture import Fixture
32 32
33 33 fixture = Fixture()
34 34
35 35
36 36 def route_path(name, params=None, **kwargs):
37 37 import urllib
38 38
39 39 base_url = {
40 40 'repo_groups': ADMIN_PREFIX + '/repo_groups',
41 'repo_groups_data': ADMIN_PREFIX + '/repo_groups_data',
41 42 'repo_group_new': ADMIN_PREFIX + '/repo_group/new',
42 43 'repo_group_create': ADMIN_PREFIX + '/repo_group/create',
43 44
44 45 }[name].format(**kwargs)
45 46
46 47 if params:
47 48 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
48 49 return base_url
49 50
50 51
51 52 def _get_permission_for_user(user, repo):
52 53 perm = UserRepoToPerm.query()\
53 54 .filter(UserRepoToPerm.repository ==
54 55 Repository.get_by_repo_name(repo))\
55 56 .filter(UserRepoToPerm.user == User.get_by_username(user))\
56 57 .all()
57 58 return perm
58 59
59 60
60 61 @pytest.mark.usefixtures("app")
61 62 class TestAdminRepositoryGroups(object):
63
62 64 def test_show_repo_groups(self, autologin_user):
63 response = self.app.get(route_path('repo_groups'))
64 response.mustcontain('data: []')
65 self.app.get(route_path('repo_groups'))
66
67 def test_show_repo_groups_data(self, autologin_user, xhr_header):
68 response = self.app.get(route_path(
69 'repo_groups_data'), extra_environ=xhr_header)
70
71 all_repo_groups = RepoGroup.query().count()
72 assert response.json['recordsTotal'] == all_repo_groups
65 73
66 def test_show_repo_groups_after_creating_group(self, autologin_user):
74 def test_show_repo_groups_data_filtered(self, autologin_user, xhr_header):
75 response = self.app.get(route_path(
76 'repo_groups_data', params={'search[value]': 'empty_search'}),
77 extra_environ=xhr_header)
78
79 all_repo_groups = RepoGroup.query().count()
80 assert response.json['recordsTotal'] == all_repo_groups
81 assert response.json['recordsFiltered'] == 0
82
83 def test_show_repo_groups_after_creating_group(self, autologin_user, xhr_header):
67 84 fixture.create_repo_group('test_repo_group')
68 response = self.app.get(route_path('repo_groups'))
85 response = self.app.get(route_path(
86 'repo_groups_data'), extra_environ=xhr_header)
69 87 response.mustcontain('"name_raw": "test_repo_group"')
70 88 fixture.destroy_repo_group('test_repo_group')
71 89
72 90 def test_new(self, autologin_user):
73 91 self.app.get(route_path('repo_group_new'))
74 92
75 93 def test_new_with_parent_group(self, autologin_user, user_util):
76 94 gr = user_util.create_repo_group()
77 95
78 96 self.app.get(route_path('repo_group_new'),
79 97 params=dict(parent_group=gr.group_name))
80 98
81 99 def test_new_by_regular_user_no_permission(self, autologin_regular_user):
82 100 self.app.get(route_path('repo_group_new'), status=403)
83 101
84 102 @pytest.mark.parametrize('repo_group_name', [
85 103 'git_repo',
86 104 'git_repo_ąć',
87 105 'hg_repo',
88 106 '12345',
89 107 'hg_repo_ąć',
90 108 ])
91 109 def test_create(self, autologin_user, repo_group_name, csrf_token):
92 110 repo_group_name_unicode = repo_group_name.decode('utf8')
93 111 description = 'description for newly created repo group'
94 112
95 113 response = self.app.post(
96 114 route_path('repo_group_create'),
97 115 fixture._get_group_create_params(
98 116 group_name=repo_group_name,
99 117 group_description=description,
100 118 csrf_token=csrf_token))
101 119
102 120 # run the check page that triggers the flash message
103 121 repo_gr_url = h.route_path(
104 122 'repo_group_home', repo_group_name=repo_group_name)
105 123
106 124 assert_session_flash(
107 125 response,
108 126 'Created repository group <a href="%s">%s</a>' % (
109 127 repo_gr_url, repo_group_name_unicode))
110 128
111 129 # # test if the repo group was created in the database
112 130 new_repo_group = RepoGroupModel()._get_repo_group(
113 131 repo_group_name_unicode)
114 132 assert new_repo_group is not None
115 133
116 134 assert new_repo_group.group_name == repo_group_name_unicode
117 135 assert new_repo_group.group_description == description
118 136
119 137 # test if the repository is visible in the list ?
120 138 response = self.app.get(repo_gr_url)
121 139 response.mustcontain(repo_group_name)
122 140
123 141 # test if the repository group was created on filesystem
124 142 is_on_filesystem = os.path.isdir(
125 143 os.path.join(TESTS_TMP_PATH, repo_group_name))
126 144 if not is_on_filesystem:
127 145 self.fail('no repo group %s in filesystem' % repo_group_name)
128 146
129 147 RepoGroupModel().delete(repo_group_name_unicode)
130 148 Session().commit()
131 149
132 150 @pytest.mark.parametrize('repo_group_name', [
133 151 'git_repo',
134 152 'git_repo_ąć',
135 153 'hg_repo',
136 154 '12345',
137 155 'hg_repo_ąć',
138 156 ])
139 157 def test_create_subgroup(self, autologin_user, user_util, repo_group_name, csrf_token):
140 158 parent_group = user_util.create_repo_group()
141 159 parent_group_name = parent_group.group_name
142 160
143 161 expected_group_name = '{}/{}'.format(
144 162 parent_group_name, repo_group_name)
145 163 expected_group_name_unicode = expected_group_name.decode('utf8')
146 164
147 165 try:
148 166 response = self.app.post(
149 167 route_path('repo_group_create'),
150 168 fixture._get_group_create_params(
151 169 group_name=repo_group_name,
152 170 group_parent_id=parent_group.group_id,
153 171 group_description='Test desciption',
154 172 csrf_token=csrf_token))
155 173
156 174 assert_session_flash(
157 175 response,
158 176 u'Created repository group <a href="%s">%s</a>' % (
159 177 h.route_path('repo_group_home',
160 178 repo_group_name=expected_group_name),
161 179 expected_group_name_unicode))
162 180 finally:
163 181 RepoGroupModel().delete(expected_group_name_unicode)
164 182 Session().commit()
165 183
166 184 def test_user_with_creation_permissions_cannot_create_subgroups(
167 185 self, autologin_regular_user, user_util):
168 186
169 187 user_util.grant_user_permission(
170 188 TEST_USER_REGULAR_LOGIN, 'hg.repogroup.create.true')
171 189 parent_group = user_util.create_repo_group()
172 190 parent_group_id = parent_group.group_id
173 191 self.app.get(
174 192 route_path('repo_group_new',
175 193 params=dict(parent_group=parent_group_id), ),
176 194 status=403)
@@ -1,215 +1,361 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20 import datetime
21 21 import logging
22 22 import formencode
23 23 import formencode.htmlfill
24 24
25 25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden
26 26 from pyramid.view import view_config
27 27 from pyramid.renderers import render
28 28 from pyramid.response import Response
29 29
30 30 from rhodecode import events
31 31 from rhodecode.apps._base import BaseAppView, DataGridAppView
32 32
33 from rhodecode.lib.ext_json import json
34 33 from rhodecode.lib.auth import (
35 34 LoginRequired, CSRFRequired, NotAnonymous,
36 35 HasPermissionAny, HasRepoGroupPermissionAny)
37 36 from rhodecode.lib import helpers as h, audit_logger
38 from rhodecode.lib.utils2 import safe_int, safe_unicode
37 from rhodecode.lib.utils2 import safe_int, safe_unicode, datetime_to_time
39 38 from rhodecode.model.forms import RepoGroupForm
40 39 from rhodecode.model.repo_group import RepoGroupModel
41 40 from rhodecode.model.scm import RepoGroupList
42 from rhodecode.model.db import Session, RepoGroup
41 from rhodecode.model.db import (
42 or_, count, func, in_filter_generator, Session, RepoGroup, User, Repository)
43 43
44 44 log = logging.getLogger(__name__)
45 45
46 46
47 47 class AdminRepoGroupsView(BaseAppView, DataGridAppView):
48 48
49 49 def load_default_context(self):
50 50 c = self._get_local_tmpl_context()
51 51
52 52 return c
53 53
54 54 def _load_form_data(self, c):
55 55 allow_empty_group = False
56 56
57 57 if self._can_create_repo_group():
58 58 # we're global admin, we're ok and we can create TOP level groups
59 59 allow_empty_group = True
60 60
61 61 # override the choices for this form, we need to filter choices
62 62 # and display only those we have ADMIN right
63 63 groups_with_admin_rights = RepoGroupList(
64 64 RepoGroup.query().all(),
65 65 perm_set=['group.admin'])
66 66 c.repo_groups = RepoGroup.groups_choices(
67 67 groups=groups_with_admin_rights,
68 68 show_empty_group=allow_empty_group)
69 69
70 70 def _can_create_repo_group(self, parent_group_id=None):
71 71 is_admin = HasPermissionAny('hg.admin')('group create controller')
72 72 create_repo_group = HasPermissionAny(
73 73 'hg.repogroup.create.true')('group create controller')
74 74 if is_admin or (create_repo_group and not parent_group_id):
75 75 # we're global admin, or we have global repo group create
76 76 # permission
77 77 # we're ok and we can create TOP level groups
78 78 return True
79 79 elif parent_group_id:
80 80 # we check the permission if we can write to parent group
81 81 group = RepoGroup.get(parent_group_id)
82 82 group_name = group.group_name if group else None
83 83 if HasRepoGroupPermissionAny('group.admin')(
84 84 group_name, 'check if user is an admin of group'):
85 85 # we're an admin of passed in group, we're ok.
86 86 return True
87 87 else:
88 88 return False
89 89 return False
90 90
91 # permission check in data loading of
92 # `repo_group_list_data` via RepoGroupList
91 93 @LoginRequired()
92 94 @NotAnonymous()
93 # perms check inside
94 95 @view_config(
95 96 route_name='repo_groups', request_method='GET',
96 97 renderer='rhodecode:templates/admin/repo_groups/repo_groups.mako')
97 98 def repo_group_list(self):
98 99 c = self.load_default_context()
100 return self._get_template_context(c)
99 101
100 repo_group_list = RepoGroup.get_all_repo_groups()
101 repo_group_list_acl = RepoGroupList(
102 repo_group_list, perm_set=['group.admin'])
103 repo_group_data = RepoGroupModel().get_repo_groups_as_dict(
104 repo_group_list=repo_group_list_acl, admin=True)
105 c.data = json.dumps(repo_group_data)
106 return self._get_template_context(c)
102 # permission check inside
103 @LoginRequired()
104 @NotAnonymous()
105 @view_config(
106 route_name='repo_groups_data', request_method='GET',
107 renderer='json_ext', xhr=True)
108 def repo_group_list_data(self):
109 self.load_default_context()
110 column_map = {
111 'name_raw': 'group_name_hash',
112 'desc': 'group_description',
113 'last_change_raw': 'updated_on',
114 'top_level_repos': 'repos_total',
115 'owner': 'user_username',
116 }
117 draw, start, limit = self._extract_chunk(self.request)
118 search_q, order_by, order_dir = self._extract_ordering(
119 self.request, column_map=column_map)
120
121 _render = self.request.get_partial_renderer(
122 'rhodecode:templates/data_table/_dt_elements.mako')
123 c = _render.get_call_context()
124
125 def quick_menu(repo_group_name):
126 return _render('quick_repo_group_menu', repo_group_name)
127
128 def repo_group_lnk(repo_group_name):
129 return _render('repo_group_name', repo_group_name)
130
131 def last_change(last_change):
132 if isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
133 delta = datetime.timedelta(
134 seconds=(datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
135 last_change = last_change + delta
136 return _render("last_change", last_change)
137
138 def desc(desc, personal):
139 return _render(
140 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
141
142 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
143 return _render(
144 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
145
146 def user_profile(username):
147 return _render('user_profile', username)
148
149 auth_repo_group_list = RepoGroupList(
150 RepoGroup.query().all(), perm_set=['group.admin'])
151
152 allowed_ids = [-1]
153 for repo_group in auth_repo_group_list:
154 allowed_ids.append(repo_group.group_id)
155
156 repo_groups_data_total_count = RepoGroup.query()\
157 .filter(or_(
158 # generate multiple IN to fix limitation problems
159 *in_filter_generator(RepoGroup.group_id, allowed_ids)
160 )) \
161 .count()
162
163 repo_groups_data_total_inactive_count = RepoGroup.query()\
164 .filter(RepoGroup.group_id.in_(allowed_ids))\
165 .count()
166
167 repo_count = count(Repository.repo_id)
168 base_q = Session.query(
169 RepoGroup.group_name,
170 RepoGroup.group_name_hash,
171 RepoGroup.group_description,
172 RepoGroup.group_id,
173 RepoGroup.personal,
174 RepoGroup.updated_on,
175 User,
176 repo_count.label('repos_count')
177 ) \
178 .filter(or_(
179 # generate multiple IN to fix limitation problems
180 *in_filter_generator(RepoGroup.group_id, allowed_ids)
181 )) \
182 .outerjoin(Repository) \
183 .join(User, User.user_id == RepoGroup.user_id) \
184 .group_by(RepoGroup, User)
185
186 if search_q:
187 like_expression = u'%{}%'.format(safe_unicode(search_q))
188 base_q = base_q.filter(or_(
189 RepoGroup.group_name.ilike(like_expression),
190 ))
191
192 repo_groups_data_total_filtered_count = base_q.count()
193 # the inactive isn't really used, but we still make it same as other data grids
194 # which use inactive (users,user groups)
195 repo_groups_data_total_filtered_inactive_count = repo_groups_data_total_filtered_count
196
197 sort_defined = False
198 if order_by == 'group_name':
199 sort_col = func.lower(RepoGroup.group_name)
200 sort_defined = True
201 elif order_by == 'repos_total':
202 sort_col = repo_count
203 sort_defined = True
204 elif order_by == 'user_username':
205 sort_col = User.username
206 else:
207 sort_col = getattr(RepoGroup, order_by, None)
208
209 if sort_defined or sort_col:
210 if order_dir == 'asc':
211 sort_col = sort_col.asc()
212 else:
213 sort_col = sort_col.desc()
214
215 base_q = base_q.order_by(sort_col)
216 base_q = base_q.offset(start).limit(limit)
217
218 # authenticated access to user groups
219 auth_repo_group_list = base_q.all()
220
221 repo_groups_data = []
222 for repo_gr in auth_repo_group_list:
223 row = {
224 "menu": quick_menu(repo_gr.group_name),
225 "name": repo_group_lnk(repo_gr.group_name),
226 "name_raw": repo_gr.group_name,
227 "last_change": last_change(repo_gr.updated_on),
228 "last_change_raw": datetime_to_time(repo_gr.updated_on),
229
230 "last_changeset": "",
231 "last_changeset_raw": "",
232
233 "desc": desc(repo_gr.group_description, repo_gr.personal),
234 "owner": user_profile(repo_gr.User.username),
235 "top_level_repos": repo_gr.repos_count,
236 "action": repo_group_actions(
237 repo_gr.group_id, repo_gr.group_name, repo_gr.repos_count),
238
239 }
240
241 repo_groups_data.append(row)
242
243 data = ({
244 'draw': draw,
245 'data': repo_groups_data,
246 'recordsTotal': repo_groups_data_total_count,
247 'recordsTotalInactive': repo_groups_data_total_inactive_count,
248 'recordsFiltered': repo_groups_data_total_filtered_count,
249 'recordsFilteredInactive': repo_groups_data_total_filtered_inactive_count,
250 })
251
252 return data
107 253
108 254 @LoginRequired()
109 255 @NotAnonymous()
110 256 # perm checks inside
111 257 @view_config(
112 258 route_name='repo_group_new', request_method='GET',
113 259 renderer='rhodecode:templates/admin/repo_groups/repo_group_add.mako')
114 260 def repo_group_new(self):
115 261 c = self.load_default_context()
116 262
117 263 # perm check for admin, create_group perm or admin of parent_group
118 264 parent_group_id = safe_int(self.request.GET.get('parent_group'))
119 265 if not self._can_create_repo_group(parent_group_id):
120 266 raise HTTPForbidden()
121 267
122 268 self._load_form_data(c)
123 269
124 270 defaults = {} # Future proof for default of repo group
125 271 data = render(
126 272 'rhodecode:templates/admin/repo_groups/repo_group_add.mako',
127 273 self._get_template_context(c), self.request)
128 274 html = formencode.htmlfill.render(
129 275 data,
130 276 defaults=defaults,
131 277 encoding="UTF-8",
132 278 force_defaults=False
133 279 )
134 280 return Response(html)
135 281
136 282 @LoginRequired()
137 283 @NotAnonymous()
138 284 @CSRFRequired()
139 285 # perm checks inside
140 286 @view_config(
141 287 route_name='repo_group_create', request_method='POST',
142 288 renderer='rhodecode:templates/admin/repo_groups/repo_group_add.mako')
143 289 def repo_group_create(self):
144 290 c = self.load_default_context()
145 291 _ = self.request.translate
146 292
147 293 parent_group_id = safe_int(self.request.POST.get('group_parent_id'))
148 294 can_create = self._can_create_repo_group(parent_group_id)
149 295
150 296 self._load_form_data(c)
151 297 # permissions for can create group based on parent_id are checked
152 298 # here in the Form
153 299 available_groups = map(lambda k: safe_unicode(k[0]), c.repo_groups)
154 300 repo_group_form = RepoGroupForm(
155 301 self.request.translate, available_groups=available_groups,
156 302 can_create_in_root=can_create)()
157 303
158 304 repo_group_name = self.request.POST.get('group_name')
159 305 try:
160 306 owner = self._rhodecode_user
161 307 form_result = repo_group_form.to_python(dict(self.request.POST))
162 308 copy_permissions = form_result.get('group_copy_permissions')
163 309 repo_group = RepoGroupModel().create(
164 310 group_name=form_result['group_name_full'],
165 311 group_description=form_result['group_description'],
166 312 owner=owner.user_id,
167 313 copy_permissions=form_result['group_copy_permissions']
168 314 )
169 315 Session().flush()
170 316
171 317 repo_group_data = repo_group.get_api_data()
172 318 audit_logger.store_web(
173 319 'repo_group.create', action_data={'data': repo_group_data},
174 320 user=self._rhodecode_user)
175 321
176 322 Session().commit()
177 323
178 324 _new_group_name = form_result['group_name_full']
179 325
180 326 repo_group_url = h.link_to(
181 327 _new_group_name,
182 328 h.route_path('repo_group_home', repo_group_name=_new_group_name))
183 329 h.flash(h.literal(_('Created repository group %s')
184 330 % repo_group_url), category='success')
185 331
186 332 except formencode.Invalid as errors:
187 333 data = render(
188 334 'rhodecode:templates/admin/repo_groups/repo_group_add.mako',
189 335 self._get_template_context(c), self.request)
190 336 html = formencode.htmlfill.render(
191 337 data,
192 338 defaults=errors.value,
193 339 errors=errors.error_dict or {},
194 340 prefix_error=False,
195 341 encoding="UTF-8",
196 342 force_defaults=False
197 343 )
198 344 return Response(html)
199 345 except Exception:
200 346 log.exception("Exception during creation of repository group")
201 347 h.flash(_('Error occurred during creation of repository group %s')
202 348 % repo_group_name, category='error')
203 349 raise HTTPFound(h.route_path('home'))
204 350
205 351 affected_user_ids = [self._rhodecode_user.user_id]
206 352 if copy_permissions:
207 353 user_group_perms = repo_group.permissions(expand_from_user_groups=True)
208 354 copy_perms = [perm['user_id'] for perm in user_group_perms]
209 355 # also include those newly created by copy
210 356 affected_user_ids.extend(copy_perms)
211 357 events.trigger(events.UserPermissionsChange(affected_user_ids))
212 358
213 359 raise HTTPFound(
214 360 h.route_path('repo_group_home',
215 361 repo_group_name=form_result['group_name_full']))
@@ -1,259 +1,271 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 import formencode
24 24 import formencode.htmlfill
25 25
26 26 from pyramid.httpexceptions import HTTPFound
27 27 from pyramid.view import view_config
28 28 from pyramid.response import Response
29 29 from pyramid.renderers import render
30 30
31 31 from rhodecode import events
32 32 from rhodecode.apps._base import BaseAppView, DataGridAppView
33 33 from rhodecode.lib.auth import (
34 34 LoginRequired, NotAnonymous, CSRFRequired, HasPermissionAnyDecorator)
35 35 from rhodecode.lib import helpers as h, audit_logger
36 36 from rhodecode.lib.utils2 import safe_unicode
37 37
38 38 from rhodecode.model.forms import UserGroupForm
39 39 from rhodecode.model.permission import PermissionModel
40 40 from rhodecode.model.scm import UserGroupList
41 41 from rhodecode.model.db import (
42 or_, count, User, UserGroup, UserGroupMember)
42 or_, count, User, UserGroup, UserGroupMember, in_filter_generator)
43 43 from rhodecode.model.meta import Session
44 44 from rhodecode.model.user_group import UserGroupModel
45 45 from rhodecode.model.db import true
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 class AdminUserGroupsView(BaseAppView, DataGridAppView):
51 51
52 52 def load_default_context(self):
53 53 c = self._get_local_tmpl_context()
54 54
55 55 PermissionModel().set_global_permission_choices(
56 56 c, gettext_translator=self.request.translate)
57 57
58 58 return c
59 59
60 60 # permission check in data loading of
61 61 # `user_groups_list_data` via UserGroupList
62 62 @LoginRequired()
63 63 @NotAnonymous()
64 64 @view_config(
65 65 route_name='user_groups', request_method='GET',
66 66 renderer='rhodecode:templates/admin/user_groups/user_groups.mako')
67 67 def user_groups_list(self):
68 68 c = self.load_default_context()
69 69 return self._get_template_context(c)
70 70
71 71 # permission check inside
72 72 @LoginRequired()
73 73 @NotAnonymous()
74 74 @view_config(
75 75 route_name='user_groups_data', request_method='GET',
76 76 renderer='json_ext', xhr=True)
77 77 def user_groups_list_data(self):
78 78 self.load_default_context()
79 79 column_map = {
80 80 'active': 'users_group_active',
81 81 'description': 'user_group_description',
82 82 'members': 'members_total',
83 83 'owner': 'user_username',
84 84 'sync': 'group_data'
85 85 }
86 86 draw, start, limit = self._extract_chunk(self.request)
87 87 search_q, order_by, order_dir = self._extract_ordering(
88 88 self.request, column_map=column_map)
89 89
90 90 _render = self.request.get_partial_renderer(
91 91 'rhodecode:templates/data_table/_dt_elements.mako')
92 92
93 93 def user_group_name(user_group_name):
94 94 return _render("user_group_name", user_group_name)
95 95
96 96 def user_group_actions(user_group_id, user_group_name):
97 97 return _render("user_group_actions", user_group_id, user_group_name)
98 98
99 99 def user_profile(username):
100 100 return _render('user_profile', username)
101 101
102 102 auth_user_group_list = UserGroupList(
103 103 UserGroup.query().all(), perm_set=['usergroup.admin'])
104 104
105 105 allowed_ids = [-1]
106 106 for user_group in auth_user_group_list:
107 107 allowed_ids.append(user_group.users_group_id)
108 108
109 109 user_groups_data_total_count = UserGroup.query()\
110 .filter(UserGroup.users_group_id.in_(allowed_ids))\
110 .filter(or_(
111 # generate multiple IN to fix limitation problems
112 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
113 ))\
111 114 .count()
112 115
113 116 user_groups_data_total_inactive_count = UserGroup.query()\
114 .filter(UserGroup.users_group_id.in_(allowed_ids))\
117 .filter(or_(
118 # generate multiple IN to fix limitation problems
119 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
120 ))\
115 121 .filter(UserGroup.users_group_active != true()).count()
116 122
117 123 member_count = count(UserGroupMember.user_id)
118 124 base_q = Session.query(
119 125 UserGroup.users_group_name,
120 126 UserGroup.user_group_description,
121 127 UserGroup.users_group_active,
122 128 UserGroup.users_group_id,
123 129 UserGroup.group_data,
124 130 User,
125 131 member_count.label('member_count')
126 132 ) \
127 .filter(UserGroup.users_group_id.in_(allowed_ids)) \
133 .filter(or_(
134 # generate multiple IN to fix limitation problems
135 *in_filter_generator(UserGroup.users_group_id, allowed_ids)
136 )) \
128 137 .outerjoin(UserGroupMember) \
129 138 .join(User, User.user_id == UserGroup.user_id) \
130 139 .group_by(UserGroup, User)
131 140
132 141 base_q_inactive = base_q.filter(UserGroup.users_group_active != true())
133 142
134 143 if search_q:
135 144 like_expression = u'%{}%'.format(safe_unicode(search_q))
136 145 base_q = base_q.filter(or_(
137 146 UserGroup.users_group_name.ilike(like_expression),
138 147 ))
139 148 base_q_inactive = base_q.filter(UserGroup.users_group_active != true())
140 149
141 150 user_groups_data_total_filtered_count = base_q.count()
142 151 user_groups_data_total_filtered_inactive_count = base_q_inactive.count()
143 152
153 sort_defined = False
144 154 if order_by == 'members_total':
145 155 sort_col = member_count
156 sort_defined = True
146 157 elif order_by == 'user_username':
147 158 sort_col = User.username
148 159 else:
149 160 sort_col = getattr(UserGroup, order_by, None)
150 161
151 if isinstance(sort_col, count) or sort_col:
162 if sort_defined or sort_col:
152 163 if order_dir == 'asc':
153 164 sort_col = sort_col.asc()
154 165 else:
155 166 sort_col = sort_col.desc()
156 167
157 168 base_q = base_q.order_by(sort_col)
158 169 base_q = base_q.offset(start).limit(limit)
159 170
160 171 # authenticated access to user groups
161 172 auth_user_group_list = base_q.all()
162 173
163 174 user_groups_data = []
164 175 for user_gr in auth_user_group_list:
165 user_groups_data.append({
176 row = {
166 177 "users_group_name": user_group_name(user_gr.users_group_name),
167 178 "name_raw": h.escape(user_gr.users_group_name),
168 179 "description": h.escape(user_gr.user_group_description),
169 180 "members": user_gr.member_count,
170 181 # NOTE(marcink): because of advanced query we
171 182 # need to load it like that
172 183 "sync": UserGroup._load_sync(
173 184 UserGroup._load_group_data(user_gr.group_data)),
174 185 "active": h.bool2icon(user_gr.users_group_active),
175 186 "owner": user_profile(user_gr.User.username),
176 187 "action": user_group_actions(
177 188 user_gr.users_group_id, user_gr.users_group_name)
178 })
189 }
190 user_groups_data.append(row)
179 191
180 192 data = ({
181 193 'draw': draw,
182 194 'data': user_groups_data,
183 195 'recordsTotal': user_groups_data_total_count,
184 196 'recordsTotalInactive': user_groups_data_total_inactive_count,
185 197 'recordsFiltered': user_groups_data_total_filtered_count,
186 198 'recordsFilteredInactive': user_groups_data_total_filtered_inactive_count,
187 199 })
188 200
189 201 return data
190 202
191 203 @LoginRequired()
192 204 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
193 205 @view_config(
194 206 route_name='user_groups_new', request_method='GET',
195 207 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
196 208 def user_groups_new(self):
197 209 c = self.load_default_context()
198 210 return self._get_template_context(c)
199 211
200 212 @LoginRequired()
201 213 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
202 214 @CSRFRequired()
203 215 @view_config(
204 216 route_name='user_groups_create', request_method='POST',
205 217 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
206 218 def user_groups_create(self):
207 219 _ = self.request.translate
208 220 c = self.load_default_context()
209 221 users_group_form = UserGroupForm(self.request.translate)()
210 222
211 223 user_group_name = self.request.POST.get('users_group_name')
212 224 try:
213 225 form_result = users_group_form.to_python(dict(self.request.POST))
214 226 user_group = UserGroupModel().create(
215 227 name=form_result['users_group_name'],
216 228 description=form_result['user_group_description'],
217 229 owner=self._rhodecode_user.user_id,
218 230 active=form_result['users_group_active'])
219 231 Session().flush()
220 232 creation_data = user_group.get_api_data()
221 233 user_group_name = form_result['users_group_name']
222 234
223 235 audit_logger.store_web(
224 236 'user_group.create', action_data={'data': creation_data},
225 237 user=self._rhodecode_user)
226 238
227 239 user_group_link = h.link_to(
228 240 h.escape(user_group_name),
229 241 h.route_path(
230 242 'edit_user_group', user_group_id=user_group.users_group_id))
231 243 h.flash(h.literal(_('Created user group %(user_group_link)s')
232 244 % {'user_group_link': user_group_link}),
233 245 category='success')
234 246 Session().commit()
235 247 user_group_id = user_group.users_group_id
236 248 except formencode.Invalid as errors:
237 249
238 250 data = render(
239 251 'rhodecode:templates/admin/user_groups/user_group_add.mako',
240 252 self._get_template_context(c), self.request)
241 253 html = formencode.htmlfill.render(
242 254 data,
243 255 defaults=errors.value,
244 256 errors=errors.error_dict or {},
245 257 prefix_error=False,
246 258 encoding="UTF-8",
247 259 force_defaults=False
248 260 )
249 261 return Response(html)
250 262
251 263 except Exception:
252 264 log.exception("Exception creating user group")
253 265 h.flash(_('Error occurred during creation of user group %s') \
254 266 % user_group_name, category='error')
255 267 raise HTTPFound(h.route_path('user_groups_new'))
256 268
257 269 events.trigger(events.UserPermissionsChange([self._rhodecode_user.user_id]))
258 270 raise HTTPFound(
259 271 h.route_path('edit_user_group', user_group_id=user_group_id))
@@ -1,316 +1,310 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 from pyramid.view import view_config
24 24 from pyramid.httpexceptions import HTTPFound
25 25
26 26 from rhodecode import events
27 27 from rhodecode.apps._base import RepoAppView
28 28 from rhodecode.lib import helpers as h
29 29 from rhodecode.lib import audit_logger
30 30 from rhodecode.lib.auth import (
31 31 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired,
32 32 HasRepoPermissionAny)
33 33 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
34 34 from rhodecode.lib.utils2 import safe_int
35 35 from rhodecode.lib.vcs import RepositoryError
36 36 from rhodecode.model.db import Session, UserFollowing, User, Repository
37 37 from rhodecode.model.repo import RepoModel
38 38 from rhodecode.model.scm import ScmModel
39 39
40 40 log = logging.getLogger(__name__)
41 41
42 42
43 43 class RepoSettingsView(RepoAppView):
44 44
45 45 def load_default_context(self):
46 46 c = self._get_local_tmpl_context()
47 47 return c
48 48
49 49 def _get_users_with_permissions(self):
50 50 user_permissions = {}
51 51 for perm in self.db_repo.permissions():
52 52 user_permissions[perm.user_id] = perm
53 53
54 54 return user_permissions
55 55
56 56 @LoginRequired()
57 57 @HasRepoPermissionAnyDecorator('repository.admin')
58 58 @view_config(
59 59 route_name='edit_repo_advanced', request_method='GET',
60 60 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
61 61 def edit_advanced(self):
62 62 c = self.load_default_context()
63 63 c.active = 'advanced'
64 64
65 65 c.default_user_id = User.get_default_user().user_id
66 66 c.in_public_journal = UserFollowing.query() \
67 67 .filter(UserFollowing.user_id == c.default_user_id) \
68 68 .filter(UserFollowing.follows_repository == self.db_repo).scalar()
69 69
70 c.has_origin_repo_read_perm = False
71 if self.db_repo.fork:
72 c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
73 'repository.write', 'repository.read', 'repository.admin')(
74 self.db_repo.fork.repo_name, 'repo set as fork page')
75
76 70 c.ver_info_dict = self.rhodecode_vcs_repo.get_hooks_info()
77 71
78 72 return self._get_template_context(c)
79 73
80 74 @LoginRequired()
81 75 @HasRepoPermissionAnyDecorator('repository.admin')
82 76 @CSRFRequired()
83 77 @view_config(
84 78 route_name='edit_repo_advanced_archive', request_method='POST',
85 79 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
86 80 def edit_advanced_archive(self):
87 81 """
88 82 Archives the repository. It will become read-only, and not visible in search
89 83 or other queries. But still visible for super-admins.
90 84 """
91 85
92 86 _ = self.request.translate
93 87
94 88 try:
95 89 old_data = self.db_repo.get_api_data()
96 90 RepoModel().archive(self.db_repo)
97 91
98 92 repo = audit_logger.RepoWrap(repo_id=None, repo_name=self.db_repo.repo_name)
99 93 audit_logger.store_web(
100 94 'repo.archive', action_data={'old_data': old_data},
101 95 user=self._rhodecode_user, repo=repo)
102 96
103 97 ScmModel().mark_for_invalidation(self.db_repo_name, delete=True)
104 98 h.flash(
105 99 _('Archived repository `%s`') % self.db_repo_name,
106 100 category='success')
107 101 Session().commit()
108 102 except Exception:
109 103 log.exception("Exception during archiving of repository")
110 104 h.flash(_('An error occurred during archiving of `%s`')
111 105 % self.db_repo_name, category='error')
112 106 # redirect to advanced for more deletion options
113 107 raise HTTPFound(
114 108 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name,
115 109 _anchor='advanced-archive'))
116 110
117 111 # flush permissions for all users defined in permissions
118 112 affected_user_ids = self._get_users_with_permissions().keys()
119 113 events.trigger(events.UserPermissionsChange(affected_user_ids))
120 114
121 115 raise HTTPFound(h.route_path('home'))
122 116
123 117 @LoginRequired()
124 118 @HasRepoPermissionAnyDecorator('repository.admin')
125 119 @CSRFRequired()
126 120 @view_config(
127 121 route_name='edit_repo_advanced_delete', request_method='POST',
128 122 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
129 123 def edit_advanced_delete(self):
130 124 """
131 125 Deletes the repository, or shows warnings if deletion is not possible
132 126 because of attached forks or other errors.
133 127 """
134 128 _ = self.request.translate
135 129 handle_forks = self.request.POST.get('forks', None)
136 130 if handle_forks == 'detach_forks':
137 131 handle_forks = 'detach'
138 132 elif handle_forks == 'delete_forks':
139 133 handle_forks = 'delete'
140 134
141 135 try:
142 136 old_data = self.db_repo.get_api_data()
143 137 RepoModel().delete(self.db_repo, forks=handle_forks)
144 138
145 139 _forks = self.db_repo.forks.count()
146 140 if _forks and handle_forks:
147 141 if handle_forks == 'detach_forks':
148 142 h.flash(_('Detached %s forks') % _forks, category='success')
149 143 elif handle_forks == 'delete_forks':
150 144 h.flash(_('Deleted %s forks') % _forks, category='success')
151 145
152 146 repo = audit_logger.RepoWrap(repo_id=None, repo_name=self.db_repo.repo_name)
153 147 audit_logger.store_web(
154 148 'repo.delete', action_data={'old_data': old_data},
155 149 user=self._rhodecode_user, repo=repo)
156 150
157 151 ScmModel().mark_for_invalidation(self.db_repo_name, delete=True)
158 152 h.flash(
159 153 _('Deleted repository `%s`') % self.db_repo_name,
160 154 category='success')
161 155 Session().commit()
162 156 except AttachedForksError:
163 157 repo_advanced_url = h.route_path(
164 158 'edit_repo_advanced', repo_name=self.db_repo_name,
165 159 _anchor='advanced-delete')
166 160 delete_anchor = h.link_to(_('detach or delete'), repo_advanced_url)
167 161 h.flash(_('Cannot delete `{repo}` it still contains attached forks. '
168 162 'Try using {delete_or_detach} option.')
169 163 .format(repo=self.db_repo_name, delete_or_detach=delete_anchor),
170 164 category='warning')
171 165
172 166 # redirect to advanced for forks handle action ?
173 167 raise HTTPFound(repo_advanced_url)
174 168
175 169 except AttachedPullRequestsError:
176 170 repo_advanced_url = h.route_path(
177 171 'edit_repo_advanced', repo_name=self.db_repo_name,
178 172 _anchor='advanced-delete')
179 173 attached_prs = len(self.db_repo.pull_requests_source +
180 174 self.db_repo.pull_requests_target)
181 175 h.flash(
182 176 _('Cannot delete `{repo}` it still contains {num} attached pull requests. '
183 177 'Consider archiving the repository instead.').format(
184 178 repo=self.db_repo_name, num=attached_prs), category='warning')
185 179
186 180 # redirect to advanced for forks handle action ?
187 181 raise HTTPFound(repo_advanced_url)
188 182
189 183 except Exception:
190 184 log.exception("Exception during deletion of repository")
191 185 h.flash(_('An error occurred during deletion of `%s`')
192 186 % self.db_repo_name, category='error')
193 187 # redirect to advanced for more deletion options
194 188 raise HTTPFound(
195 189 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name,
196 190 _anchor='advanced-delete'))
197 191
198 192 raise HTTPFound(h.route_path('home'))
199 193
200 194 @LoginRequired()
201 195 @HasRepoPermissionAnyDecorator('repository.admin')
202 196 @CSRFRequired()
203 197 @view_config(
204 198 route_name='edit_repo_advanced_journal', request_method='POST',
205 199 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
206 200 def edit_advanced_journal(self):
207 201 """
208 202 Set's this repository to be visible in public journal,
209 203 in other words making default user to follow this repo
210 204 """
211 205 _ = self.request.translate
212 206
213 207 try:
214 208 user_id = User.get_default_user().user_id
215 209 ScmModel().toggle_following_repo(self.db_repo.repo_id, user_id)
216 210 h.flash(_('Updated repository visibility in public journal'),
217 211 category='success')
218 212 Session().commit()
219 213 except Exception:
220 214 h.flash(_('An error occurred during setting this '
221 215 'repository in public journal'),
222 216 category='error')
223 217
224 218 raise HTTPFound(
225 219 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
226 220
227 221 @LoginRequired()
228 222 @HasRepoPermissionAnyDecorator('repository.admin')
229 223 @CSRFRequired()
230 224 @view_config(
231 225 route_name='edit_repo_advanced_fork', request_method='POST',
232 226 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
233 227 def edit_advanced_fork(self):
234 228 """
235 229 Mark given repository as a fork of another
236 230 """
237 231 _ = self.request.translate
238 232
239 233 new_fork_id = safe_int(self.request.POST.get('id_fork_of'))
240 234
241 235 # valid repo, re-check permissions
242 236 if new_fork_id:
243 237 repo = Repository.get(new_fork_id)
244 238 # ensure we have at least read access to the repo we mark
245 239 perm_check = HasRepoPermissionAny(
246 240 'repository.read', 'repository.write', 'repository.admin')
247 241
248 242 if repo and perm_check(repo_name=repo.repo_name):
249 243 new_fork_id = repo.repo_id
250 244 else:
251 245 new_fork_id = None
252 246
253 247 try:
254 248 repo = ScmModel().mark_as_fork(
255 249 self.db_repo_name, new_fork_id, self._rhodecode_user.user_id)
256 250 fork = repo.fork.repo_name if repo.fork else _('Nothing')
257 251 Session().commit()
258 252 h.flash(
259 253 _('Marked repo %s as fork of %s') % (self.db_repo_name, fork),
260 254 category='success')
261 255 except RepositoryError as e:
262 256 log.exception("Repository Error occurred")
263 257 h.flash(str(e), category='error')
264 258 except Exception:
265 259 log.exception("Exception while editing fork")
266 260 h.flash(_('An error occurred during this operation'),
267 261 category='error')
268 262
269 263 raise HTTPFound(
270 264 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
271 265
272 266 @LoginRequired()
273 267 @HasRepoPermissionAnyDecorator('repository.admin')
274 268 @CSRFRequired()
275 269 @view_config(
276 270 route_name='edit_repo_advanced_locking', request_method='POST',
277 271 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
278 272 def edit_advanced_locking(self):
279 273 """
280 274 Toggle locking of repository
281 275 """
282 276 _ = self.request.translate
283 277 set_lock = self.request.POST.get('set_lock')
284 278 set_unlock = self.request.POST.get('set_unlock')
285 279
286 280 try:
287 281 if set_lock:
288 282 Repository.lock(self.db_repo, self._rhodecode_user.user_id,
289 283 lock_reason=Repository.LOCK_WEB)
290 284 h.flash(_('Locked repository'), category='success')
291 285 elif set_unlock:
292 286 Repository.unlock(self.db_repo)
293 287 h.flash(_('Unlocked repository'), category='success')
294 288 except Exception as e:
295 289 log.exception("Exception during unlocking")
296 290 h.flash(_('An error occurred during unlocking'), category='error')
297 291
298 292 raise HTTPFound(
299 293 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
300 294
301 295 @LoginRequired()
302 296 @HasRepoPermissionAnyDecorator('repository.admin')
303 297 @view_config(
304 298 route_name='edit_repo_advanced_hooks', request_method='GET',
305 299 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
306 300 def edit_advanced_install_hooks(self):
307 301 """
308 302 Install Hooks for repository
309 303 """
310 304 _ = self.request.translate
311 305 self.load_default_context()
312 306 self.rhodecode_vcs_repo.install_hooks(force=True)
313 307 h.flash(_('installed updated hooks into this repository'),
314 308 category='success')
315 309 raise HTTPFound(
316 310 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
@@ -1,123 +1,147 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import sys
23 import shutil
24 23 import logging
25 24 import tempfile
26 25 import textwrap
27
26 import collections
28 27 from .base import VcsServer
28 from rhodecode.model.db import RhodeCodeUi
29 from rhodecode.model.settings import VcsSettingsModel
29 30
30 31 log = logging.getLogger(__name__)
31 32
32 33
33 34 class MercurialTunnelWrapper(object):
34 35 process = None
35 36
36 37 def __init__(self, server):
37 38 self.server = server
38 39 self.stdin = sys.stdin
39 40 self.stdout = sys.stdout
40 self.svn_conf_fd, self.svn_conf_path = tempfile.mkstemp()
41 self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp()
41 self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp(prefix='hgrc_rhodecode_')
42 42
43 43 def create_hooks_env(self):
44 repo_name = self.server.repo_name
45 hg_flags = self.config_to_hgrc(repo_name)
44 46
45 47 content = textwrap.dedent(
46 48 '''
47 # SSH hooks version=1.0.0
48 [hooks]
49 pretxnchangegroup.ssh_auth=python:vcsserver.hooks.pre_push_ssh_auth
50 pretxnchangegroup.ssh=python:vcsserver.hooks.pre_push_ssh
51 changegroup.ssh=python:vcsserver.hooks.post_push_ssh
49 # RhodeCode SSH hooks version=2.0.0
50 {custom}
51 '''
52 ).format(custom='\n'.join(hg_flags))
52 53
53 preoutgoing.ssh=python:vcsserver.hooks.pre_pull_ssh
54 outgoing.ssh=python:vcsserver.hooks.post_pull_ssh
54 root = self.server.get_root_store()
55 hgrc_custom = os.path.join(root, repo_name, '.hg', 'hgrc_rhodecode')
56 hgrc_main = os.path.join(root, repo_name, '.hg', 'hgrc')
55 57
56 '''
57 )
58 # cleanup custom hgrc file
59 if os.path.isfile(hgrc_custom):
60 with open(hgrc_custom, 'wb') as f:
61 f.write('')
62 log.debug('Cleanup custom hgrc file under %s', hgrc_custom)
58 63
64 # write temp
59 65 with os.fdopen(self.hooks_env_fd, 'w') as hooks_env_file:
60 66 hooks_env_file.write(content)
61 root = self.server.get_root_store()
62 67
63 hgrc_custom = os.path.join(
64 root, self.server.repo_name, '.hg', 'hgrc_rhodecode')
65 log.debug('Wrote custom hgrc file under %s', hgrc_custom)
66 shutil.move(
67 self.hooks_env_path, hgrc_custom)
68
69 hgrc_main = os.path.join(
70 root, self.server.repo_name, '.hg', 'hgrc')
71 include_marker = '%include hgrc_rhodecode'
68 return self.hooks_env_path
72 69
73 if not os.path.isfile(hgrc_main):
74 os.mknod(hgrc_main)
75
76 with open(hgrc_main, 'rb') as f:
77 data = f.read()
78 has_marker = include_marker in data
70 def remove_configs(self):
71 os.remove(self.hooks_env_path)
79 72
80 if not has_marker:
81 log.debug('Adding include marker for hooks')
82 with open(hgrc_main, 'wa') as f:
83 f.write(textwrap.dedent('''
84 # added by RhodeCode
85 {}
86 '''.format(include_marker)))
87
88 def command(self):
73 def command(self, hgrc_path):
89 74 root = self.server.get_root_store()
90 75
91 76 command = (
92 "cd {root}; {hg_path} -R {root}{repo_name} "
77 "cd {root}; HGRCPATH={hgrc} {hg_path} -R {root}{repo_name} "
93 78 "serve --stdio".format(
94 79 root=root, hg_path=self.server.hg_path,
95 repo_name=self.server.repo_name))
80 repo_name=self.server.repo_name, hgrc=hgrc_path))
96 81 log.debug("Final CMD: %s", command)
97 82 return command
98 83
99 84 def run(self, extras):
100 85 # at this point we cannot tell, we do further ACL checks
101 86 # inside the hooks
102 87 action = '?'
103 88 # permissions are check via `pre_push_ssh_auth` hook
104 89 self.server.update_environment(action=action, extras=extras)
105 self.create_hooks_env()
106 return os.system(self.command())
90 custom_hgrc_file = self.create_hooks_env()
91
92 try:
93 return os.system(self.command(custom_hgrc_file))
94 finally:
95 self.remove_configs()
107 96
108 97
109 98 class MercurialServer(VcsServer):
110 99 backend = 'hg'
100 cli_flags = ['phases', 'largefiles', 'extensions', 'experimental', 'hooks']
111 101
112 def __init__(self, store, ini_path, repo_name,
113 user, user_permissions, config, env):
114 super(MercurialServer, self).\
115 __init__(user, user_permissions, config, env)
102 def __init__(self, store, ini_path, repo_name, user, user_permissions, config, env):
103 super(MercurialServer, self).__init__(user, user_permissions, config, env)
116 104
117 105 self.store = store
118 106 self.ini_path = ini_path
119 107 self.repo_name = repo_name
120 self._path = self.hg_path = config.get(
121 'app:main', 'ssh.executable.hg')
108 self._path = self.hg_path = config.get('app:main', 'ssh.executable.hg')
109 self.tunnel = MercurialTunnelWrapper(server=self)
110
111 def config_to_hgrc(self, repo_name):
112 ui_sections = collections.defaultdict(list)
113 ui = VcsSettingsModel(repo=repo_name).get_ui_settings(section=None, key=None)
114
115 # write default hooks
116 default_hooks = [
117 ('pretxnchangegroup.ssh_auth', 'python:vcsserver.hooks.pre_push_ssh_auth'),
118 ('pretxnchangegroup.ssh', 'python:vcsserver.hooks.pre_push_ssh'),
119 ('changegroup.ssh', 'python:vcsserver.hooks.post_push_ssh'),
120
121 ('preoutgoing.ssh', 'python:vcsserver.hooks.pre_pull_ssh'),
122 ('outgoing.ssh', 'python:vcsserver.hooks.post_pull_ssh'),
123 ]
124
125 for k, v in default_hooks:
126 ui_sections['hooks'].append((k, v))
122 127
123 self.tunnel = MercurialTunnelWrapper(server=self)
128 for entry in ui:
129 if not entry.active:
130 continue
131 sec = entry.section
132 key = entry.key
133
134 if sec in self.cli_flags:
135 # we want only custom hooks, so we skip builtins
136 if sec == 'hooks' and key in RhodeCodeUi.HOOKS_BUILTIN:
137 continue
138
139 ui_sections[sec].append([key, entry.value])
140
141 flags = []
142 for _sec, key_val in ui_sections.items():
143 flags.append(' ')
144 flags.append('[{}]'.format(_sec))
145 for key, val in key_val:
146 flags.append('{}= {}'.format(key, val))
147 return flags
@@ -1,116 +1,119 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 import os
21 22 import mock
22 23 import pytest
23 24
24 25 from rhodecode.apps.ssh_support.lib.backends.hg import MercurialServer
25 26 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
26 27
27 28
28 29 class MercurialServerCreator(object):
29 30 root = '/tmp/repo/path/'
30 31 hg_path = '/usr/local/bin/hg'
31 32
32 33 config_data = {
33 34 'app:main': {
34 35 'ssh.executable.hg': hg_path,
35 36 'vcs.hooks.protocol': 'http',
36 37 }
37 38 }
38 39 repo_name = 'test_hg'
39 40 user = plain_dummy_user()
40 41
41 42 def __init__(self):
42 43 def config_get(part, key):
43 44 return self.config_data.get(part, {}).get(key)
44 45 self.config_mock = mock.Mock()
45 46 self.config_mock.get = mock.Mock(side_effect=config_get)
46 47
47 48 def create(self, **kwargs):
48 49 parameters = {
49 50 'store': self.root,
50 51 'ini_path': '',
51 52 'user': self.user,
52 53 'repo_name': self.repo_name,
53 54 'user_permissions': {
54 55 'test_hg': 'repository.admin'
55 56 },
56 57 'config': self.config_mock,
57 58 'env': plain_dummy_env()
58 59 }
59 60 parameters.update(kwargs)
60 61 server = MercurialServer(**parameters)
61 62 return server
62 63
63 64
64 65 @pytest.fixture
65 66 def hg_server(app):
66 67 return MercurialServerCreator()
67 68
68 69
69 70 class TestMercurialServer(object):
70 71
71 def test_command(self, hg_server):
72 def test_command(self, hg_server, tmpdir):
72 73 server = hg_server.create()
74 custom_hgrc = os.path.join(str(tmpdir), 'hgrc')
73 75 expected_command = (
74 'cd {root}; {hg_path} -R {root}{repo_name} serve --stdio'.format(
75 root=hg_server.root, hg_path=hg_server.hg_path,
76 'cd {root}; HGRCPATH={custom_hgrc} {hg_path} -R {root}{repo_name} serve --stdio'.format(
77 root=hg_server.root, custom_hgrc=custom_hgrc, hg_path=hg_server.hg_path,
76 78 repo_name=hg_server.repo_name)
77 79 )
78 assert expected_command == server.tunnel.command()
80 server_command = server.tunnel.command(custom_hgrc)
81 assert expected_command == server_command
79 82
80 83 @pytest.mark.parametrize('permissions, action, code', [
81 84 ({}, 'pull', -2),
82 85 ({'test_hg': 'repository.read'}, 'pull', 0),
83 86 ({'test_hg': 'repository.read'}, 'push', -2),
84 87 ({'test_hg': 'repository.write'}, 'push', 0),
85 88 ({'test_hg': 'repository.admin'}, 'push', 0),
86 89
87 90 ])
88 91 def test_permission_checks(self, hg_server, permissions, action, code):
89 92 server = hg_server.create(user_permissions=permissions)
90 93 result = server._check_permissions(action)
91 94 assert result is code
92 95
93 96 @pytest.mark.parametrize('permissions, value', [
94 97 ({}, False),
95 98 ({'test_hg': 'repository.read'}, False),
96 99 ({'test_hg': 'repository.write'}, True),
97 100 ({'test_hg': 'repository.admin'}, True),
98 101
99 102 ])
100 103 def test_has_write_permissions(self, hg_server, permissions, value):
101 104 server = hg_server.create(user_permissions=permissions)
102 105 result = server.has_write_perm()
103 106 assert result is value
104 107
105 108 def test_run_returns_executes_command(self, hg_server):
106 109 server = hg_server.create()
107 110 from rhodecode.apps.ssh_support.lib.backends.hg import MercurialTunnelWrapper
108 111 with mock.patch.object(MercurialTunnelWrapper, 'create_hooks_env') as _patch:
109 112 _patch.return_value = 0
110 113 with mock.patch.object(MercurialTunnelWrapper, 'command', return_value='date'):
111 114 exit_code = server.run()
112 115
113 116 assert exit_code == (0, False)
114 117
115 118
116 119
@@ -1,459 +1,463 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import deform
22 22 import logging
23 23 import peppercorn
24 24 import webhelpers.paginate
25 25
26 26 from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPNotFound
27 27
28 28 from rhodecode.integrations import integration_type_registry
29 29 from rhodecode.apps._base import BaseAppView
30 30 from rhodecode.apps._base.navigation import navigation_list
31 31 from rhodecode.lib.auth import (
32 32 LoginRequired, CSRFRequired, HasPermissionAnyDecorator,
33 33 HasRepoPermissionAnyDecorator, HasRepoGroupPermissionAnyDecorator)
34 34 from rhodecode.lib.utils2 import safe_int
35 35 from rhodecode.lib import helpers as h
36 36 from rhodecode.model.db import Repository, RepoGroup, Session, Integration
37 37 from rhodecode.model.scm import ScmModel
38 38 from rhodecode.model.integration import IntegrationModel
39 39 from rhodecode.model.validation_schema.schemas.integration_schema import (
40 40 make_integration_schema, IntegrationScopeType)
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
45 45 class IntegrationSettingsViewBase(BaseAppView):
46 46 """
47 47 Base Integration settings view used by both repo / global settings
48 48 """
49 49
50 50 def __init__(self, context, request):
51 51 super(IntegrationSettingsViewBase, self).__init__(context, request)
52 52 self._load_view_context()
53 53
54 54 def _load_view_context(self):
55 55 """
56 56 This avoids boilerplate for repo/global+list/edit+views/templates
57 57 by doing all possible contexts at the same time however it should
58 58 be split up into separate functions once more "contexts" exist
59 59 """
60 60
61 61 self.IntegrationType = None
62 62 self.repo = None
63 63 self.repo_group = None
64 64 self.integration = None
65 65 self.integrations = {}
66 66
67 67 request = self.request
68 68
69 69 if 'repo_name' in request.matchdict: # in repo settings context
70 70 repo_name = request.matchdict['repo_name']
71 71 self.repo = Repository.get_by_repo_name(repo_name)
72 72
73 73 if 'repo_group_name' in request.matchdict: # in group settings context
74 74 repo_group_name = request.matchdict['repo_group_name']
75 75 self.repo_group = RepoGroup.get_by_group_name(repo_group_name)
76 76
77 77 if 'integration' in request.matchdict: # integration type context
78 78 integration_type = request.matchdict['integration']
79 79 if integration_type not in integration_type_registry:
80 80 raise HTTPNotFound()
81 81
82 82 self.IntegrationType = integration_type_registry[integration_type]
83 83 if self.IntegrationType.is_dummy:
84 84 raise HTTPNotFound()
85 85
86 86 if 'integration_id' in request.matchdict: # single integration context
87 87 integration_id = request.matchdict['integration_id']
88 88 self.integration = Integration.get(integration_id)
89 89
90 90 # extra perms check just in case
91 91 if not self._has_perms_for_integration(self.integration):
92 92 raise HTTPForbidden()
93 93
94 94 self.settings = self.integration and self.integration.settings or {}
95 95 self.admin_view = not (self.repo or self.repo_group)
96 96
97 97 def _has_perms_for_integration(self, integration):
98 98 perms = self.request.user.permissions
99 99
100 100 if 'hg.admin' in perms['global']:
101 101 return True
102 102
103 103 if integration.repo:
104 104 return perms['repositories'].get(
105 105 integration.repo.repo_name) == 'repository.admin'
106 106
107 107 if integration.repo_group:
108 108 return perms['repositories_groups'].get(
109 109 integration.repo_group.group_name) == 'group.admin'
110 110
111 111 return False
112 112
113 113 def _get_local_tmpl_context(self, include_app_defaults=True):
114 114 _ = self.request.translate
115 115 c = super(IntegrationSettingsViewBase, self)._get_local_tmpl_context(
116 116 include_app_defaults=include_app_defaults)
117
118 117 c.active = 'integrations'
119 118
120 119 return c
121 120
122 121 def _form_schema(self):
123 122 schema = make_integration_schema(IntegrationType=self.IntegrationType,
124 123 settings=self.settings)
125 124
126 125 # returns a clone, important if mutating the schema later
127 126 return schema.bind(
128 127 permissions=self.request.user.permissions,
129 128 no_scope=not self.admin_view)
130 129
131 130 def _form_defaults(self):
132 131 _ = self.request.translate
133 132 defaults = {}
134 133
135 134 if self.integration:
136 135 defaults['settings'] = self.integration.settings or {}
137 136 defaults['options'] = {
138 137 'name': self.integration.name,
139 138 'enabled': self.integration.enabled,
140 139 'scope': {
141 140 'repo': self.integration.repo,
142 141 'repo_group': self.integration.repo_group,
143 142 'child_repos_only': self.integration.child_repos_only,
144 143 },
145 144 }
146 145 else:
147 146 if self.repo:
148 147 scope = _('{repo_name} repository').format(
149 148 repo_name=self.repo.repo_name)
150 149 elif self.repo_group:
151 150 scope = _('{repo_group_name} repo group').format(
152 151 repo_group_name=self.repo_group.group_name)
153 152 else:
154 153 scope = _('Global')
155 154
156 155 defaults['options'] = {
157 156 'enabled': True,
158 157 'name': _('{name} integration').format(
159 158 name=self.IntegrationType.display_name),
160 159 }
161 160 defaults['options']['scope'] = {
162 161 'repo': self.repo,
163 162 'repo_group': self.repo_group,
164 163 }
165 164
166 165 return defaults
167 166
168 167 def _delete_integration(self, integration):
169 168 _ = self.request.translate
170 169 Session().delete(integration)
171 170 Session().commit()
172 171 h.flash(
173 172 _('Integration {integration_name} deleted successfully.').format(
174 173 integration_name=integration.name),
175 174 category='success')
176 175
177 176 if self.repo:
178 177 redirect_to = self.request.route_path(
179 178 'repo_integrations_home', repo_name=self.repo.repo_name)
180 179 elif self.repo_group:
181 180 redirect_to = self.request.route_path(
182 181 'repo_group_integrations_home',
183 182 repo_group_name=self.repo_group.group_name)
184 183 else:
185 184 redirect_to = self.request.route_path('global_integrations_home')
186 185 raise HTTPFound(redirect_to)
187 186
188 187 def _integration_list(self):
189 188 """ List integrations """
190 189
191 190 c = self.load_default_context()
192 191 if self.repo:
193 192 scope = self.repo
194 193 elif self.repo_group:
195 194 scope = self.repo_group
196 195 else:
197 196 scope = 'all'
198 197
199 198 integrations = []
200 199
201 200 for IntType, integration in IntegrationModel().get_integrations(
202 201 scope=scope, IntegrationType=self.IntegrationType):
203 202
204 203 # extra permissions check *just in case*
205 204 if not self._has_perms_for_integration(integration):
206 205 continue
207 206
208 207 integrations.append((IntType, integration))
209 208
210 209 sort_arg = self.request.GET.get('sort', 'name:asc')
211 210 sort_dir = 'asc'
212 211 if ':' in sort_arg:
213 212 sort_field, sort_dir = sort_arg.split(':')
214 213 else:
215 214 sort_field = sort_arg, 'asc'
216 215
217 216 assert sort_field in ('name', 'integration_type', 'enabled', 'scope')
218 217
219 218 integrations.sort(
220 219 key=lambda x: getattr(x[1], sort_field),
221 220 reverse=(sort_dir == 'desc'))
222 221
223 222 page_url = webhelpers.paginate.PageURL(
224 223 self.request.path, self.request.GET)
225 224 page = safe_int(self.request.GET.get('page', 1), 1)
226 225
227 226 integrations = h.Page(
228 227 integrations, page=page, items_per_page=10, url=page_url)
229 228
230 229 c.rev_sort_dir = sort_dir != 'desc' and 'desc' or 'asc'
231 230
232 231 c.current_IntegrationType = self.IntegrationType
233 232 c.integrations_list = integrations
234 233 c.available_integrations = integration_type_registry
235 234
236 235 return self._get_template_context(c)
237 236
238 237 def _settings_get(self, defaults=None, form=None):
239 238 """
240 239 View that displays the integration settings as a form.
241 240 """
242 241 c = self.load_default_context()
243 242
244 243 defaults = defaults or self._form_defaults()
245 244 schema = self._form_schema()
246 245
247 246 if self.integration:
248 247 buttons = ('submit', 'delete')
249 248 else:
250 249 buttons = ('submit',)
251 250
252 251 form = form or deform.Form(schema, appstruct=defaults, buttons=buttons)
253 252
254 253 c.form = form
255 254 c.current_IntegrationType = self.IntegrationType
256 255 c.integration = self.integration
257 256
258 257 return self._get_template_context(c)
259 258
    def _settings_post(self):
        """
        View that validates and stores the integration settings.

        Handles create, update and (via the 'delete' button) removal of an
        integration, then redirects to the appropriate edit page.
        """
        _ = self.request.translate

        controls = self.request.POST.items()
        pstruct = peppercorn.parse(controls)

        # the edit form's delete button short-circuits the whole flow
        if self.integration and pstruct.get('delete'):
            return self._delete_integration(self.integration)

        schema = self._form_schema()

        # When toggling only the enabled flag (no 'enabled' key posted in
        # options), relax validation of the settings sub-schema so partial
        # submissions do not fail.
        skip_settings_validation = False
        if self.integration and 'enabled' not in pstruct.get('options', {}):
            skip_settings_validation = True
            schema['settings'].validator = None
            for field in schema['settings'].children:
                field.validator = None
                field.missing = ''

        if self.integration:
            buttons = ('submit', 'delete')
        else:
            buttons = ('submit',)

        form = deform.Form(schema, buttons=buttons)

        if not self.admin_view:
            # scope is read only field in these cases, and has to be added
            options = pstruct.setdefault('options', {})
            if 'scope' not in options:
                options['scope'] = IntegrationScopeType().serialize(None, {
                    'repo': self.repo,
                    'repo_group': self.repo_group,
                })

        try:
            valid_data = form.validate_pstruct(pstruct)
        except deform.ValidationFailure as e:
            h.flash(
                _('Errors exist when saving integration settings. '
                  'Please check the form inputs.'),
                category='error')
            # re-render the form with the validation errors attached
            return self._settings_get(form=e)

        if not self.integration:
            # first save: create the DB row before applying settings
            self.integration = Integration()
            self.integration.integration_type = self.IntegrationType.key
            Session().add(self.integration)

        scope = valid_data['options']['scope']

        IntegrationModel().update_integration(self.integration,
            name=valid_data['options']['name'],
            enabled=valid_data['options']['enabled'],
            settings=valid_data['settings'],
            repo=scope['repo'],
            repo_group=scope['repo_group'],
            child_repos_only=scope['child_repos_only'],
        )

        self.integration.settings = valid_data['settings']
        Session().commit()
        # Display success message and redirect.
        h.flash(
            _('Integration {integration_name} updated successfully.').format(
                integration_name=self.IntegrationType.display_name),
            category='success')

        # if integration scope changes, we must redirect to the right place
        # keeping in mind if the original view was for /repo/ or /_admin/
        admin_view = not (self.repo or self.repo_group)

        if self.integration.repo and not admin_view:
            redirect_to = self.request.route_path(
                'repo_integrations_edit',
                repo_name=self.integration.repo.repo_name,
                integration=self.integration.integration_type,
                integration_id=self.integration.integration_id)
        elif self.integration.repo_group and not admin_view:
            redirect_to = self.request.route_path(
                'repo_group_integrations_edit',
                repo_group_name=self.integration.repo_group.group_name,
                integration=self.integration.integration_type,
                integration_id=self.integration.integration_id)
        else:
            redirect_to = self.request.route_path(
                'global_integrations_edit',
                integration=self.integration.integration_type,
                integration_id=self.integration.integration_id)

        return HTTPFound(redirect_to)
354 353
355 354 def _new_integration(self):
356 355 c = self.load_default_context()
357 356 c.available_integrations = integration_type_registry
358 357 return self._get_template_context(c)
359 358
    def load_default_context(self):
        # Template-context hook; each concrete view class must implement it.
        raise NotImplementedError()
362 361
363 362
class GlobalIntegrationsView(IntegrationSettingsViewBase):
    """Global (/_admin) integration views; all actions require hg.admin."""

    def load_default_context(self):
        c = self._get_local_tmpl_context()
        c.repo = self.repo
        c.repo_group = self.repo_group
        # admin navigation sidebar entries
        c.navlist = navigation_list(self.request)

        return c

    @LoginRequired()
    @HasPermissionAnyDecorator('hg.admin')
    def integration_list(self):
        return self._integration_list()

    @LoginRequired()
    @HasPermissionAnyDecorator('hg.admin')
    def settings_get(self):
        return self._settings_get()

    @LoginRequired()
    @HasPermissionAnyDecorator('hg.admin')
    @CSRFRequired()
    def settings_post(self):
        return self._settings_post()

    @LoginRequired()
    @HasPermissionAnyDecorator('hg.admin')
    def new_integration(self):
        return self._new_integration()
393 392
394 393
class RepoIntegrationsView(IntegrationSettingsViewBase):
    """Repository-scoped integration views; require repository.admin."""

    def load_default_context(self):
        c = self._get_local_tmpl_context()

        c.repo = self.repo
        c.repo_group = self.repo_group

        self.db_repo = self.repo
        c.rhodecode_db_repo = self.repo
        c.repo_name = self.db_repo.repo_name
        c.repository_pull_requests = ScmModel().get_pull_requests(self.repo)

        # whether the current user may follow the "fork of" link in templates
        c.has_origin_repo_read_perm = False
        if self.db_repo.fork:
            c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
                'repository.write', 'repository.read', 'repository.admin')(
                self.db_repo.fork.repo_name, 'summary fork link')
        return c

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.admin')
    def integration_list(self):
        return self._integration_list()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.admin')
    def settings_get(self):
        return self._settings_get()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.admin')
    @CSRFRequired()
    def settings_post(self):
        return self._settings_post()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.admin')
    def new_integration(self):
        return self._new_integration()
429 433
430 434
class RepoGroupIntegrationsView(IntegrationSettingsViewBase):
    """Repository-group-scoped integration views; require group.admin."""

    def load_default_context(self):
        c = self._get_local_tmpl_context()
        c.repo = self.repo
        c.repo_group = self.repo_group
        # admin navigation sidebar entries
        c.navlist = navigation_list(self.request)

        return c

    @LoginRequired()
    @HasRepoGroupPermissionAnyDecorator('group.admin')
    def integration_list(self):
        return self._integration_list()

    @LoginRequired()
    @HasRepoGroupPermissionAnyDecorator('group.admin')
    def settings_get(self):
        return self._settings_get()

    @LoginRequired()
    @HasRepoGroupPermissionAnyDecorator('group.admin')
    @CSRFRequired()
    def settings_post(self):
        return self._settings_post()

    @LoginRequired()
    @HasRepoGroupPermissionAnyDecorator('group.admin')
    def new_integration(self):
        return self._new_integration()
@@ -1,624 +1,645 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database creation, and setup module for RhodeCode Enterprise. Used for creation
23 23 of database as well as for migration operations
24 24 """
25 25
26 26 import os
27 27 import sys
28 28 import time
29 29 import uuid
30 30 import logging
31 31 import getpass
32 32 from os.path import dirname as dn, join as jn
33 33
34 34 from sqlalchemy.engine import create_engine
35 35
36 36 from rhodecode import __dbversion__
37 37 from rhodecode.model import init_model
38 38 from rhodecode.model.user import UserModel
39 39 from rhodecode.model.db import (
40 40 User, Permission, RhodeCodeUi, RhodeCodeSetting, UserToPerm,
41 41 DbMigrateVersion, RepoGroup, UserRepoGroupToPerm, CacheKey, Repository)
42 42 from rhodecode.model.meta import Session, Base
43 43 from rhodecode.model.permission import PermissionModel
44 44 from rhodecode.model.repo import RepoModel
45 45 from rhodecode.model.repo_group import RepoGroupModel
46 46 from rhodecode.model.settings import SettingsModel
47 47
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51
def notify(msg):
    """Print *msg* upper-cased inside an asterisk banner (migration notices)."""
    width = len(msg) + (4 * 2)
    border = '*' * width
    banner = '\n%s\n*** %s ***\n%s' % (border, msg, border)
    print(banner.upper())
58 58
59 59
60 60 class DbManage(object):
61 61
    def __init__(self, log_sql, dbconf, root, tests=False,
                 SESSION=None, cli_args=None):
        """
        :param log_sql: echo SQL statements to the log
        :param dbconf: database connection URI
        :param root: application root path
        :param tests: running under the test suite (auto-confirms prompts)
        :param SESSION: optional pre-made SQLAlchemy session to reuse
        :param cli_args: dict of command-line overrides (force_ask, etc.)
        """
        # database name is the last path segment of the connection URI
        self.dbname = dbconf.split('/')[-1]
        self.tests = tests
        self.root = root
        self.dburi = dbconf
        self.log_sql = log_sql
        self.db_exists = False
        self.cli_args = cli_args or {}
        # binds self.sa to the given SESSION or creates a fresh engine/session
        self.init_db(SESSION=SESSION)
        # pre-seeded answer for confirmation prompts when force_ask was given
        self.ask_ok = self.get_ask_ok_func(self.cli_args.get('force_ask'))
73 73
74 74 def get_ask_ok_func(self, param):
75 75 if param not in [None]:
76 76 # return a function lambda that has a default set to param
77 77 return lambda *args, **kwargs: param
78 78 else:
79 79 from rhodecode.lib.utils import ask_ok
80 80 return ask_ok
81 81
    def init_db(self, SESSION=None):
        """Bind ``self.sa`` to *SESSION*, or build a new engine and session
        from ``self.dburi`` when none is supplied."""
        if SESSION:
            self.sa = SESSION
        else:
            # init new sessions
            engine = create_engine(self.dburi, echo=self.log_sql)
            init_model(engine)
            self.sa = Session()
90 90
    def create_tables(self, override=False):
        """
        Create a auth database

        Destructive: drops all tables first (auto-confirmed under tests,
        otherwise asks the user). ``override=True`` forces unconditional
        CREATE statements instead of create-if-missing.
        """

        log.info("Existing database with the same name is going to be destroyed.")
        log.info("Setup command will run DROP ALL command on that database.")
        if self.tests:
            destroy = True
        else:
            destroy = self.ask_ok('Are you sure that you want to destroy the old database? [y/n]')
        if not destroy:
            log.info('Nothing done.')
            sys.exit(0)
        if destroy:
            Base.metadata.drop_all()

        # checkfirst=True only creates tables that do not exist yet
        checkfirst = not override
        Base.metadata.create_all(checkfirst=checkfirst)
        log.info('Created tables for %s', self.dbname)
111 111
112 112 def set_db_version(self):
113 113 ver = DbMigrateVersion()
114 114 ver.version = __dbversion__
115 115 ver.repository_id = 'rhodecode_db_migrations'
116 116 ver.repository_path = 'versions'
117 117 self.sa.add(ver)
118 118 log.info('db version set to: %s', __dbversion__)
119 119
    def run_pre_migration_tasks(self):
        """
        Run various tasks before actually doing migrations
        """
        # delete cache keys on each upgrade; stale cache entries could
        # otherwise reference the old schema
        total = CacheKey.query().count()
        log.info("Deleting (%s) cache keys now...", total)
        CacheKey.delete_all_cache()
128 128
    def upgrade(self, version=None):
        """
        Upgrades given database schema to given revision following
        all needed steps, to perform the upgrade

        :param version: optional explicit version to stamp before upgrading
        """

        from rhodecode.lib.dbmigrate.migrate.versioning import api
        from rhodecode.lib.dbmigrate.migrate.exceptions import \
            DatabaseNotControlledError

        if 'sqlite' in self.dburi:
            print(
                '********************** WARNING **********************\n'
                'Make sure your version of sqlite is at least 3.7.X. \n'
                'Earlier versions are known to fail on some migrations\n'
                '*****************************************************\n')

        upgrade = self.ask_ok(
            'You are about to perform a database upgrade. Make '
            'sure you have backed up your database. '
            'Continue ? [y/n]')
        if not upgrade:
            log.info('No upgrade performed')
            sys.exit(0)

        repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
                             'rhodecode/lib/dbmigrate')
        db_uri = self.dburi

        if version:
            DbMigrateVersion.set_version(version)

        try:
            curr_version = api.db_version(db_uri, repository_path)
            msg = ('Found current database db_uri under version '
                   'control with version {}'.format(curr_version))

        except (RuntimeError, DatabaseNotControlledError):
            # first upgrade on an uncontrolled database: put it under
            # version control at version 1 and continue from there
            curr_version = 1
            msg = ('Current database is not under version control. Setting '
                   'as version %s' % curr_version)
            api.version_control(db_uri, repository_path, curr_version)

        notify(msg)

        self.run_pre_migration_tasks()

        if curr_version == __dbversion__:
            log.info('This database is already at the newest version')
            sys.exit(0)

        upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
        notify('attempting to upgrade database from '
               'version %s to version %s' % (curr_version, __dbversion__))

        # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
        _step = None
        for step in upgrade_steps:
            notify('performing upgrade step %s' % step)
            time.sleep(0.5)

            api.upgrade(db_uri, repository_path, step)
            # discard any pending ORM state left over from the step
            self.sa.rollback()
            notify('schema upgrade for step %s completed' % (step,))

            _step = step

        notify('upgrade to version %s successful' % _step)
198 198
199 199 def fix_repo_paths(self):
200 200 """
201 201 Fixes an old RhodeCode version path into new one without a '*'
202 202 """
203 203
204 204 paths = self.sa.query(RhodeCodeUi)\
205 205 .filter(RhodeCodeUi.ui_key == '/')\
206 206 .scalar()
207 207
208 208 paths.ui_value = paths.ui_value.replace('*', '')
209 209
210 210 try:
211 211 self.sa.add(paths)
212 212 self.sa.commit()
213 213 except Exception:
214 214 self.sa.rollback()
215 215 raise
216 216
217 217 def fix_default_user(self):
218 218 """
219 219 Fixes an old default user with some 'nicer' default values,
220 220 used mostly for anonymous access
221 221 """
222 222 def_user = self.sa.query(User)\
223 223 .filter(User.username == User.DEFAULT_USER)\
224 224 .one()
225 225
226 226 def_user.name = 'Anonymous'
227 227 def_user.lastname = 'User'
228 228 def_user.email = User.DEFAULT_USER_EMAIL
229 229
230 230 try:
231 231 self.sa.add(def_user)
232 232 self.sa.commit()
233 233 except Exception:
234 234 self.sa.rollback()
235 235 raise
236 236
237 237 def fix_settings(self):
238 238 """
239 239 Fixes rhodecode settings and adds ga_code key for google analytics
240 240 """
241 241
242 242 hgsettings3 = RhodeCodeSetting('ga_code', '')
243 243
244 244 try:
245 245 self.sa.add(hgsettings3)
246 246 self.sa.commit()
247 247 except Exception:
248 248 self.sa.rollback()
249 249 raise
250 250
    def create_admin_and_prompt(self):
        """Create the initial admin account, prompting interactively for any
        credential (username/password/email) not supplied via CLI args."""
        # defaults
        defaults = self.cli_args
        username = defaults.get('username')
        password = defaults.get('password')
        email = defaults.get('email')

        if username is None:
            username = raw_input('Specify admin username:')
        if password is None:
            password = self._get_admin_password()
            if not password:
                # second try
                password = self._get_admin_password()
                if not password:
                    sys.exit()
        if email is None:
            email = raw_input('Specify admin email:')
        api_key = self.cli_args.get('api_key')
        self.create_user(username, password, email, True,
                         strict_creation_check=False,
                         api_key=api_key)
274 274
275 275 def _get_admin_password(self):
276 276 password = getpass.getpass('Specify admin password '
277 277 '(min 6 chars):')
278 278 confirm = getpass.getpass('Confirm password:')
279 279
280 280 if password != confirm:
281 281 log.error('passwords mismatch')
282 282 return False
283 283 if len(password) < 6:
284 284 log.error('password is too short - use at least 6 characters')
285 285 return False
286 286
287 287 return password
288 288
    def create_test_admin_and_users(self):
        """Seed the test-suite accounts: one admin and two regular users."""
        log.info('creating admin and regular test users')
        from rhodecode.tests import TEST_USER_ADMIN_LOGIN, \
            TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
            TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
            TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
            TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL

        self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
                         TEST_USER_ADMIN_EMAIL, True, api_key=True)

        self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
                         TEST_USER_REGULAR_EMAIL, False, api_key=True)

        self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS,
                         TEST_USER_REGULAR2_EMAIL, False, api_key=True)
305 305
    def create_ui_settings(self, repo_store_path):
        """
        Creates ui settings, fills out hooks
        and disables dotencode

        :param repo_store_path: filesystem location of the repository store;
            used to derive the largefiles/LFS cache directories
        """
        settings_model = SettingsModel(sa=self.sa)
        from rhodecode.lib.vcs.backends.hg import largefiles_store
        from rhodecode.lib.vcs.backends.git import lfs_store

        # Build HOOKS
        hooks = [
            (RhodeCodeUi.HOOK_REPO_SIZE, 'python:vcsserver.hooks.repo_size'),

            # HG
            (RhodeCodeUi.HOOK_PRE_PULL, 'python:vcsserver.hooks.pre_pull'),
            (RhodeCodeUi.HOOK_PULL, 'python:vcsserver.hooks.log_pull_action'),
            (RhodeCodeUi.HOOK_PRE_PUSH, 'python:vcsserver.hooks.pre_push'),
            (RhodeCodeUi.HOOK_PRETX_PUSH, 'python:vcsserver.hooks.pre_push'),
            (RhodeCodeUi.HOOK_PUSH, 'python:vcsserver.hooks.log_push_action'),
            (RhodeCodeUi.HOOK_PUSH_KEY, 'python:vcsserver.hooks.key_push'),

        ]

        # reuse an existing row per hook key when present, otherwise insert
        for key, value in hooks:
            hook_obj = settings_model.get_ui_by_key(key)
            hooks2 = hook_obj if hook_obj else RhodeCodeUi()
            hooks2.ui_section = 'hooks'
            hooks2.ui_key = key
            hooks2.ui_value = value
            self.sa.add(hooks2)

        # enable largefiles
        largefiles = RhodeCodeUi()
        largefiles.ui_section = 'extensions'
        largefiles.ui_key = 'largefiles'
        largefiles.ui_value = ''
        self.sa.add(largefiles)

        # set default largefiles cache dir, defaults to
        # /repo_store_location/.cache/largefiles
        largefiles = RhodeCodeUi()
        largefiles.ui_section = 'largefiles'
        largefiles.ui_key = 'usercache'
        largefiles.ui_value = largefiles_store(repo_store_path)

        self.sa.add(largefiles)

        # set default lfs cache dir, defaults to
        # /repo_store_location/.cache/lfs_store
        lfsstore = RhodeCodeUi()
        lfsstore.ui_section = 'vcs_git_lfs'
        lfsstore.ui_key = 'store_location'
        lfsstore.ui_value = lfs_store(repo_store_path)

        self.sa.add(lfsstore)

        # enable hgsubversion disabled by default
        hgsubversion = RhodeCodeUi()
        hgsubversion.ui_section = 'extensions'
        hgsubversion.ui_key = 'hgsubversion'
        hgsubversion.ui_value = ''
        hgsubversion.ui_active = False
        self.sa.add(hgsubversion)

        # enable hgevolve disabled by default
        hgevolve = RhodeCodeUi()
        hgevolve.ui_section = 'extensions'
        hgevolve.ui_key = 'evolve'
        hgevolve.ui_value = ''
        hgevolve.ui_active = False
        self.sa.add(hgevolve)

        # experimental evolve knobs, also seeded disabled
        hgevolve = RhodeCodeUi()
        hgevolve.ui_section = 'experimental'
        hgevolve.ui_key = 'evolution'
        hgevolve.ui_value = ''
        hgevolve.ui_active = False
        self.sa.add(hgevolve)

        hgevolve = RhodeCodeUi()
        hgevolve.ui_section = 'experimental'
        hgevolve.ui_key = 'evolution.exchange'
        hgevolve.ui_value = ''
        hgevolve.ui_active = False
        self.sa.add(hgevolve)

        hgevolve = RhodeCodeUi()
        hgevolve.ui_section = 'extensions'
        hgevolve.ui_key = 'topic'
        hgevolve.ui_value = ''
        hgevolve.ui_active = False
        self.sa.add(hgevolve)

        # enable hggit disabled by default
        hggit = RhodeCodeUi()
        hggit.ui_section = 'extensions'
        hggit.ui_key = 'hggit'
        hggit.ui_value = ''
        hggit.ui_active = False
        self.sa.add(hggit)

        # set svn branch defaults
        branches = ["/branches/*", "/trunk"]
        tags = ["/tags/*"]

        for branch in branches:
            settings_model.create_ui_section_value(
                RhodeCodeUi.SVN_BRANCH_ID, branch)

        for tag in tags:
            settings_model.create_ui_section_value(RhodeCodeUi.SVN_TAG_ID, tag)
396 417
397 418 def create_auth_plugin_options(self, skip_existing=False):
398 419 """
399 420 Create default auth plugin settings, and make it active
400 421
401 422 :param skip_existing:
402 423 """
403 424
404 425 for k, v, t in [('auth_plugins', 'egg:rhodecode-enterprise-ce#rhodecode', 'list'),
405 426 ('auth_rhodecode_enabled', 'True', 'bool')]:
406 427 if (skip_existing and
407 428 SettingsModel().get_setting_by_name(k) is not None):
408 429 log.debug('Skipping option %s', k)
409 430 continue
410 431 setting = RhodeCodeSetting(k, v, t)
411 432 self.sa.add(setting)
412 433
413 434 def create_default_options(self, skip_existing=False):
414 435 """Creates default settings"""
415 436
416 437 for k, v, t in [
417 438 ('default_repo_enable_locking', False, 'bool'),
418 439 ('default_repo_enable_downloads', False, 'bool'),
419 440 ('default_repo_enable_statistics', False, 'bool'),
420 441 ('default_repo_private', False, 'bool'),
421 442 ('default_repo_type', 'hg', 'unicode')]:
422 443
423 444 if (skip_existing and
424 445 SettingsModel().get_setting_by_name(k) is not None):
425 446 log.debug('Skipping option %s', k)
426 447 continue
427 448 setting = RhodeCodeSetting(k, v, t)
428 449 self.sa.add(setting)
429 450
    def fixup_groups(self):
        """Normalize repo group names and make sure each group has a default
        permission entry for the default (anonymous) user."""
        def_usr = User.get_default_user()
        for g in RepoGroup.query().all():
            g.group_name = g.get_new_name(g.name)
            self.sa.add(g)
            # get default perm
            default = UserRepoGroupToPerm.query()\
                .filter(UserRepoGroupToPerm.group == g)\
                .filter(UserRepoGroupToPerm.user == def_usr)\
                .scalar()

            if default is None:
                log.debug('missing default permission for group %s adding', g)
                perm_obj = RepoGroupModel()._create_default_perms(g)
                self.sa.add(perm_obj)
445 466
    def reset_permissions(self, username):
        """
        Resets permissions to default state, useful when old systems had
        bad permissions, we must clean them up

        :param username: user whose permissions should be reset
        :returns: True when stale permissions were wiped and re-created,
            False/None otherwise
        """
        default_user = User.get_by_username(username)
        if not default_user:
            return

        u2p = UserToPerm.query()\
            .filter(UserToPerm.user == default_user).all()
        fixed = False
        # a mismatched count means leftover/stale rows: wipe and re-seed
        if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS):
            for p in u2p:
                Session().delete(p)
            fixed = True
            self.populate_default_permissions()
        return fixed
466 487
    def update_repo_info(self):
        # refresh stored repository information for all repositories
        # NOTE(review): called on the class, not an instance — confirm
        # RepoModel.update_repoinfo supports this call style.
        RepoModel.update_repoinfo()
469 490
    def config_prompt(self, test_repo_path='', retries=3):
        """Resolve and validate the repositories storage path, prompting the
        user when it was not supplied; retries up to *retries* times on an
        invalid path and returns the normalized real path."""
        defaults = self.cli_args
        _path = defaults.get('repos_location')
        if retries == 3:
            # only log on the first (non-retry) invocation
            log.info('Setting up repositories config')

        if _path is not None:
            path = _path
        elif not self.tests and not test_repo_path:
            path = raw_input(
                'Enter a valid absolute path to store repositories. '
                'All repositories in that path will be added automatically:'
            )
        else:
            path = test_repo_path
        path_ok = True

        # check proper dir
        if not os.path.isdir(path):
            path_ok = False
            log.error('Given path %s is not a valid directory', path)

        elif not os.path.isabs(path):
            path_ok = False
            log.error('Given path %s is not an absolute path', path)

        # check if path is at least readable.
        if not os.access(path, os.R_OK):
            path_ok = False
            log.error('Given path %s is not readable', path)

        # check write access, warn user about non writeable paths
        elif not os.access(path, os.W_OK) and path_ok:
            log.warning('No write permission to given path %s', path)

            q = ('Given path %s is not writeable, do you want to '
                 'continue with read only mode ? [y/n]' % (path,))
            if not self.ask_ok(q):
                log.error('Canceled by user')
                sys.exit(-1)

        if retries == 0:
            sys.exit('max retries reached')
        if not path_ok:
            # recurse with one fewer retry until a valid path is given
            retries -= 1
            return self.config_prompt(test_repo_path, retries)

        real_path = os.path.normpath(os.path.realpath(path))

        if real_path != os.path.normpath(path):
            # resolve symlinks explicitly, with user confirmation
            q = ('Path looks like a symlink, RhodeCode Enterprise will store '
                 'given path as %s ? [y/n]') % (real_path,)
            if not self.ask_ok(q):
                log.error('Canceled by user')
                sys.exit(-1)

        return real_path
527 548
528 549 def create_settings(self, path):
529 550
530 551 self.create_ui_settings(path)
531 552
532 553 ui_config = [
533 554 ('web', 'push_ssl', 'False'),
534 555 ('web', 'allow_archive', 'gz zip bz2'),
535 556 ('web', 'allow_push', '*'),
536 557 ('web', 'baseurl', '/'),
537 558 ('paths', '/', path),
538 559 ('phases', 'publish', 'True')
539 560 ]
540 561 for section, key, value in ui_config:
541 562 ui_conf = RhodeCodeUi()
542 563 setattr(ui_conf, 'ui_section', section)
543 564 setattr(ui_conf, 'ui_key', key)
544 565 setattr(ui_conf, 'ui_value', value)
545 566 self.sa.add(ui_conf)
546 567
547 568 # rhodecode app settings
548 569 settings = [
549 570 ('realm', 'RhodeCode', 'unicode'),
550 571 ('title', '', 'unicode'),
551 572 ('pre_code', '', 'unicode'),
552 573 ('post_code', '', 'unicode'),
553 574 ('show_public_icon', True, 'bool'),
554 575 ('show_private_icon', True, 'bool'),
555 576 ('stylify_metatags', False, 'bool'),
556 577 ('dashboard_items', 100, 'int'),
557 578 ('admin_grid_items', 25, 'int'),
558 579 ('show_version', True, 'bool'),
559 580 ('use_gravatar', False, 'bool'),
560 581 ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'),
561 582 ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'),
562 583 ('support_url', '', 'unicode'),
563 584 ('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode'),
564 585 ('show_revision_number', True, 'bool'),
565 586 ('show_sha_length', 12, 'int'),
566 587 ]
567 588
568 589 for key, val, type_ in settings:
569 590 sett = RhodeCodeSetting(key, val, type_)
570 591 self.sa.add(sett)
571 592
572 593 self.create_auth_plugin_options()
573 594 self.create_default_options()
574 595
575 596 log.info('created ui config')
576 597
    def create_user(self, username, password, email='', admin=False,
                    strict_creation_check=True, api_key=None):
        """Create or update a user; optionally grant admin rights and seed a
        never-expiring builtin auth token when *api_key* is truthy."""
        log.info('creating user `%s`', username)
        user = UserModel().create_or_update(
            username, password, email, firstname=u'RhodeCode', lastname=u'Admin',
            active=True, admin=admin, extern_type="rhodecode",
            strict_creation_check=strict_creation_check)

        if api_key:
            log.info('setting a new default auth token for user `%s`', username)
            # lifetime_minutes=-1 creates a token that never expires
            UserModel().add_auth_token(
                user=user, lifetime_minutes=-1,
                role=UserModel.auth_token_role.ROLE_ALL,
                description=u'BUILTIN TOKEN')
591 612
    def create_default_user(self):
        log.info('creating default user')
        # create default user for handling default permissions.
        user = UserModel().create_or_update(username=User.DEFAULT_USER,
                                            password=str(uuid.uuid1())[:20],
                                            email=User.DEFAULT_USER_EMAIL,
                                            firstname=u'Anonymous',
                                            lastname=u'User',
                                            strict_creation_check=False)
        # based on configuration options activate/de-activate this user which
        # controls anonymous access
        if self.cli_args.get('public_access') is False:
            log.info('Public access disabled')
            user.active = False
            Session().add(user)
            Session().commit()
608 629
609 630 def create_permissions(self):
610 631 """
611 632 Creates all permissions defined in the system
612 633 """
613 634 # module.(access|create|change|delete)_[name]
614 635 # module.(none|read|write|admin)
615 636 log.info('creating permissions')
616 637 PermissionModel(self.sa).create_permissions()
617 638
618 639 def populate_default_permissions(self):
619 640 """
620 641 Populate default permissions. It will create only the default
621 642 permissions that are missing, and not alter already defined ones
622 643 """
623 644 log.info('creating default user permissions')
624 645 PermissionModel(self.sa).create_default_user_permissions(user=User.DEFAULT_USER)
@@ -1,169 +1,169 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import webob
22 22 from pyramid.threadlocal import get_current_request
23 23
24 24 from rhodecode import events
25 25 from rhodecode.lib import hooks_base
26 26 from rhodecode.lib import utils2
27 27
28 28
def _get_rc_scm_extras(username, repo_name, repo_alias, action):
    # TODO: johbo: Replace by vcs_operation_context and remove fully
    from rhodecode.lib.base import vcs_operation_context

    needs_locking_check = action in ('pull', 'push')
    request = get_current_request()

    # fall back to a blank WSGI environ when not inside a request
    fallback_environ = webob.Request.blank('').environ
    try:
        environ = request.environ or fallback_environ
    except TypeError:
        # we might use this outside of request context
        environ = fallback_environ

    extras = vcs_operation_context(
        environ, repo_name, username, action, repo_alias, needs_locking_check)
    return utils2.AttributeDict(extras)
47 47
48 48
def trigger_post_push_hook(
        username, action, hook_type, repo_name, repo_alias, commit_ids):
    """
    Trigger the push action hooks.

    :param username: username who pushes
    :param action: push/push_local/push_remote
    :param hook_type: hook variant identifier handed to the hook
    :param repo_name: name of repo
    :param repo_alias: the type of SCM repo
    :param commit_ids: list of commit ids that were pushed
    """
    extras = _get_rc_scm_extras(username, repo_name, repo_alias, action)
    extras.commit_ids = commit_ids
    extras.hook_type = hook_type
    hooks_base.post_push(extras)
64 64
65 65
def trigger_log_create_pull_request_hook(username, repo_name, repo_alias,
                                         pull_request, data=None):
    """
    Trigger the create-pull-request action hooks.

    :param username: username who creates the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was created
    :param data: extra data for specific events e.g {'comment': comment_obj}
    """
    if repo_alias not in ('hg', 'git'):
        # pull request hooks only apply to hg/git repositories
        return

    extras = _get_rc_scm_extras(
        username, repo_name, repo_alias, 'create_pull_request')
    events.trigger(events.PullRequestCreateEvent(pull_request))
    extras.update(pull_request.get_api_data(with_merge_state=False))
    hooks_base.log_create_pull_request(**extras)
85 85
86 86
def trigger_log_merge_pull_request_hook(username, repo_name, repo_alias,
                                        pull_request, data=None):
    """
    Trigger the merge-pull-request action hooks.

    :param username: username who merged the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was merged
    :param data: extra data for specific events e.g {'comment': comment_obj}
    """
    if repo_alias not in ('hg', 'git'):
        # pull request hooks only apply to hg/git repositories
        return

    extras = _get_rc_scm_extras(
        username, repo_name, repo_alias, 'merge_pull_request')
    events.trigger(events.PullRequestMergeEvent(pull_request))
    extras.update(pull_request.get_api_data())
    hooks_base.log_merge_pull_request(**extras)
106 106
107 107
def trigger_log_close_pull_request_hook(username, repo_name, repo_alias,
                                        pull_request, data=None):
    """
    Trigger the close-pull-request action hooks.

    :param username: username who closed the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was closed
    :param data: extra data for specific events e.g {'comment': comment_obj}
    """
    if repo_alias not in ('hg', 'git'):
        # pull request hooks only apply to hg/git repositories
        return

    extras = _get_rc_scm_extras(
        username, repo_name, repo_alias, 'close_pull_request')
    events.trigger(events.PullRequestCloseEvent(pull_request))
    extras.update(pull_request.get_api_data())
    hooks_base.log_close_pull_request(**extras)
127 127
128 128
def trigger_log_review_pull_request_hook(username, repo_name, repo_alias,
                                         pull_request, data=None):
    """
    Triggers review status change pull request action hooks

    :param username: username who creates the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that review status changed
    :param data: extra data for specific events e.g {'status': new_status}
    """
    if repo_alias not in ('hg', 'git'):
        return

    extras = _get_rc_scm_extras(username, repo_name, repo_alias,
                                'review_pull_request')
    # `data` defaults to None; guard against AttributeError on .get()
    status = (data or {}).get('status')
    events.trigger(events.PullRequestReviewEvent(pull_request, status))
    extras.update(pull_request.get_api_data())
    hooks_base.log_review_pull_request(**extras)
149 149
150 150
def trigger_log_update_pull_request_hook(username, repo_name, repo_alias,
                                         pull_request, data=None):
    """
    Trigger the update-pull-request action hooks.

    :param username: username who updated the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was updated
    :param data: extra data for specific events e.g {'comment': comment_obj}
    """
    if repo_alias not in ('hg', 'git'):
        # pull request hooks only apply to hg/git repositories
        return

    extras = _get_rc_scm_extras(
        username, repo_name, repo_alias, 'update_pull_request')
    events.trigger(events.PullRequestUpdateEvent(pull_request))
    extras.update(pull_request.get_api_data())
    hooks_base.log_update_pull_request(**extras)
@@ -1,781 +1,782 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Utilities library for RhodeCode
23 23 """
24 24
25 25 import datetime
26 26 import decorator
27 27 import json
28 28 import logging
29 29 import os
30 30 import re
31 31 import sys
32 32 import shutil
33 33 import tempfile
34 34 import traceback
35 35 import tarfile
36 36 import warnings
37 37 import hashlib
38 38 from os.path import join as jn
39 39
40 40 import paste
41 41 import pkg_resources
42 42 from webhelpers.text import collapse, remove_formatting, strip_tags
43 43 from mako import exceptions
44 44 from pyramid.threadlocal import get_current_registry
45 45 from rhodecode.lib.request import Request
46 46
47 47 from rhodecode.lib.vcs.backends.base import Config
48 48 from rhodecode.lib.vcs.exceptions import VCSError
49 49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
50 50 from rhodecode.lib.utils2 import (
51 51 safe_str, safe_unicode, get_current_rhodecode_user, md5, sha1)
52 52 from rhodecode.model import meta
53 53 from rhodecode.model.db import (
54 54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
55 55 from rhodecode.model.meta import Session
56 56
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61 61
62 62 # String which contains characters that are not allowed in slug names for
63 63 # repositories or repository groups. It is properly escaped to use it in
64 64 # regular expressions.
65 65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
66 66
67 67 # Regex that matches forbidden characters in repo/group slugs.
68 68 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
69 69
70 70 # Regex that matches allowed characters in repo/group slugs.
71 71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
72 72
73 73 # Regex that matches whole repo/group slugs.
74 74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
75 75
76 76 _license_cache = None
77 77
78 78
def repo_name_slug(value):
    """
    Convert a repository name into its slug form.

    Called on every repository create/modify to keep forbidden
    characters out of repo names.
    """
    sep = '-'
    cleaned = remove_formatting(value)
    cleaned = SLUG_BAD_CHAR_RE.sub('', cleaned)
    cleaned = re.sub('[\s]+', '-', cleaned)
    return collapse(cleaned, sep)
92 92
93 93
94 94 #==============================================================================
95 95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
96 96 #==============================================================================
def get_repo_slug(request):
    """Extract the repository name from a (pyramid) request object."""
    repo_name = ''

    if hasattr(request, 'db_repo'):
        # a db reference on the request resolves example.com/_<id>
        # style urls into real repository names
        repo_name = request.db_repo.repo_name
    elif getattr(request, 'matchdict', None):
        # pyramid route matching
        repo_name = request.matchdict.get('repo_name')

    return repo_name.rstrip('/') if repo_name else repo_name
111 111
112 112
def get_repo_group_slug(request):
    """Extract the repository group name from a (pyramid) request object."""
    group_name = ''

    if hasattr(request, 'db_repo_group'):
        # a db reference on the request resolves example.com/_<id>
        # style urls into real repo group names
        group_name = request.db_repo_group.group_name
    elif getattr(request, 'matchdict', None):
        # pyramid route matching
        group_name = request.matchdict.get('repo_group_name')

    return group_name.rstrip('/') if group_name else group_name
126 126
127 127
def get_user_group_slug(request):
    """
    Extract the user group name from a (pyramid) request object.

    Returns '' when nothing matches and None when a db lookup fails.
    """
    user_group = ''

    if hasattr(request, 'db_user_group'):
        user_group = request.db_user_group.users_group_name
    elif getattr(request, 'matchdict', None):
        # pyramid route matching: either an id or a name may be present
        user_group = request.matchdict.get('user_group_id')
        by_name = request.matchdict.get('user_group_name')
        try:
            if user_group:
                user_group = UserGroup.get(user_group)
            elif by_name:
                user_group = UserGroup.get_by_group_name(by_name)

            if user_group:
                user_group = user_group.users_group_name
        except Exception:
            log.exception('Failed to get user group by id and name')
            # catch all failures here
            return None

    return user_group
151 151
152 152
def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    :param skip_removed_repos: skip directories matching the
        "removed repository" naming pattern (rm__<date>_<time>__<name>)
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...', path, recursive)

    def _get_repos(p):
        # generator: walks one directory level, recursing on VCS misses
        dirpaths = _get_dirpaths(p)
        if not _is_dir_writable(p):
            log.warning('repo path without write access: %s', p)

        for dirpath in dirpaths:
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)

            # skip removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
                continue

            # skip .<something> dirs
            if dirpath.startswith('.'):
                continue

            try:
                scm_info = get_scm(cur_path)
                # yield the repo name relative to the scanned root
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # check if this dir contains other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)
196 196
197 197
198 198 def _get_dirpaths(p):
199 199 try:
200 200 # OS-independable way of checking if we have at least read-only
201 201 # access or not.
202 202 dirpaths = os.listdir(p)
203 203 except OSError:
204 204 log.warning('ignoring repo path without read access: %s', p)
205 205 return []
206 206
207 207 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
208 208 # decode paths and suddenly returns unicode objects itself. The items it
209 209 # cannot decode are returned as strings and cause issues.
210 210 #
211 211 # Those paths are ignored here until a solid solution for path handling has
212 212 # been built.
213 213 expected_type = type(p)
214 214
215 215 def _has_correct_type(item):
216 216 if type(item) is not expected_type:
217 217 log.error(
218 218 u"Ignoring path %s since it cannot be decoded into unicode.",
219 219 # Using "repr" to make sure that we see the byte value in case
220 220 # of support.
221 221 repr(item))
222 222 return False
223 223 return True
224 224
225 225 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
226 226
227 227 return dirpaths
228 228
229 229
230 230 def _is_dir_writable(path):
231 231 """
232 232 Probe if `path` is writable.
233 233
234 234 Due to trouble on Cygwin / Windows, this is actually probing if it is
235 235 possible to create a file inside of `path`, stat does not produce reliable
236 236 results in this case.
237 237 """
238 238 try:
239 239 with tempfile.TemporaryFile(dir=path):
240 240 pass
241 241 except OSError:
242 242 return False
243 243 return True
244 244
245 245
def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
    """
    Return True when `repo_name` under `base_path` is a valid repository.

    When `expect_scm` is given, additionally require the detected scm to
    match it.  When `explicit_scm` is given, skip detection and validate
    directly against that backend.

    :param repo_name: repository name relative to `base_path`
    :param base_path: filesystem root that holds repositories
    :param expect_scm: scm alias the repository must match, if given
    :param explicit_scm: scm alias to use instead of auto-detection
    :param config: optional backend config used with `explicit_scm`
    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
    log.debug('Checking if `%s` is a valid path for repository. '
              'Explicit type: %s', repo_name, explicit_scm)

    try:
        if explicit_scm:
            backend = get_scm_backend(explicit_scm)
            detected_scms = [backend(full_path, config=config).alias]
        else:
            detected_scms = get_scm(full_path)

        if expect_scm:
            return detected_scms[0] == expect_scm
        log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
        return True
    except VCSError:
        log.debug('path: %s is not a valid repo !', full_path)
        return False
279 279
280 280
def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
    """
    Returns True if given path is a repository group, False otherwise

    :param repo_group_name: repo group name relative to `base_path`
    :param base_path: filesystem root that holds repositories
    :param skip_path_check: accept the path without verifying that the
        directory actually exists on disk
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
    log.debug('Checking if `%s` is a valid path for repository group',
              repo_group_name)

    # check if it's not a repo
    if is_valid_repo(repo_group_name, base_path):
        log.debug('Repo called %s exist, it is not a valid repo group', repo_group_name)
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        maybe_repo = os.path.dirname(full_path)
        if maybe_repo == base_path:
            # skip root level repo check, we know root location CANNOT BE a repo group
            return False

        scm_ = get_scm(maybe_repo)
        log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
        return False
    except VCSError:
        # parent is not a repository; this path can still be a repo group
        pass

    # check if it's a valid path
    if skip_path_check or os.path.isdir(full_path):
        log.debug('path: %s is a valid repo group !', full_path)
        return True

    log.debug('path: %s is not a valid repo group !', full_path)
    return False
319 319
320 320
def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
    """
    Interactively ask a yes/no question on stdin.

    :param prompt: text shown to the user
    :param retries: number of extra attempts allowed before giving up
    :param complaint: message printed after an unrecognized answer
    :return: True for yes, False for no
    :raises IOError: when the user fails to answer within `retries` attempts
    """
    while True:
        # normalize once instead of calling .lower() per comparison
        answer = raw_input(prompt).lower()
        if answer in ('y', 'ye', 'yes'):
            return True
        if answer in ('n', 'no', 'nop', 'nope'):
            return False
        retries -= 1
        if retries < 0:
            # fail with an explanatory message instead of a bare IOError
            raise IOError('Too many failed attempts to answer the prompt')
        print(complaint)
332 332
# propagated from mercurial documentation
# hgrc sections whose entries are treated as vcs ui settings
ui_sections = [
    'alias', 'auth',
    'decode/encode', 'defaults',
    'diff', 'email',
    'extensions', 'format',
    'merge-patterns', 'merge-tools',
    'hooks', 'http_proxy',
    'smtp', 'patch',
    'paths', 'profiling',
    'server', 'trusted',
    'ui', 'web', ]
345 345
346 346
def config_data_from_db(clear_session=True, repo=None):
    """
    Read the configuration data from the database and return configuration
    tuples.

    :param clear_session: remove the db session after reading
    :param repo: optional repository to scope the settings to
    """
    from rhodecode.model.settings import VcsSettingsModel

    sa = meta.Session()
    settings_model = VcsSettingsModel(repo=repo, sa=sa)
    ui_settings = settings_model.get_ui_settings()

    config = []
    ui_data = []
    for setting in ui_settings:
        if not setting.active:
            continue
        ui_data.append((setting.section, setting.key, setting.value))
        config.append((
            safe_str(setting.section), safe_str(setting.key),
            safe_str(setting.value)))
        if setting.key == 'push_ssl':
            # force set push_ssl requirement to False, rhodecode
            # handles that
            config.append((
                safe_str(setting.section), safe_str(setting.key), False))
    log.debug(
        'settings ui from db@repo[%s]: %s',
        repo,
        ','.join(map(lambda s: '[{}] {}={}'.format(*s), ui_data)))
    if clear_session:
        meta.Session.remove()

    # TODO: mikhail: probably it makes no sense to re-read hooks information.
    # It's already there and activated/deactivated
    skip_entries = []
    enabled_hook_classes = get_enabled_hook_classes(ui_settings)
    if 'pull' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
    if 'push' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))

    return [entry for entry in config if entry[:2] not in skip_entries]
393 394
394 395
def make_db_config(clear_session=True, repo=None):
    """
    Create a :class:`Config` instance from the values stored in the database.
    """
    db_config = Config()
    for section, option, value in config_data_from_db(
            clear_session=clear_session, repo=repo):
        db_config.set(section, option, value)
    return db_config
404 405
405 406
def get_enabled_hook_classes(ui_settings):
    """
    Return the enabled hook classes.

    :param ui_settings: List of ui_settings as returned
        by :meth:`VcsSettingsModel.get_ui_settings`

    :return: a list with the enabled hook classes. The order is not guaranteed.
    :rtype: list
    """
    hook_names = {
        RhodeCodeUi.HOOK_PUSH: 'push',
        RhodeCodeUi.HOOK_PULL: 'pull',
        RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
    }

    active_keys = [
        key for section, key, value, active in ui_settings
        if section == 'hooks' and active]

    return [hook_names[key] for key in active_keys if key in hook_names]
433 434
434 435
def set_rhodecode_config(config):
    """
    Copy all application settings from the database into the given
    pyramid settings mapping.

    :param config: settings mapping, updated in place
    """
    from rhodecode.model.settings import SettingsModel

    for key, value in SettingsModel().get_all_settings().items():
        config[key] = value
446 447
447 448
def get_rhodecode_realm():
    """
    Return the rhodecode realm stored in the database settings.
    """
    from rhodecode.model.settings import SettingsModel

    realm_setting = SettingsModel().get_setting_by_name('realm')
    return safe_str(realm_setting.app_settings_value)
455 456
456 457
def get_rhodecode_base_path():
    """
    Return the base path, i.e. the filesystem location of the repository
    store, as configured in the database.
    """
    from rhodecode.model.settings import SettingsModel

    paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
    return safe_str(paths_ui.ui_value)
465 466
466 467
def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to repository
    """
    from rhodecode.model.repo_group import RepoGroupModel
    sa = meta.Session()
    groups = path.split(Repository.NAME_SEP)
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = RepoGroupModel(sa)
    owner = User.get_first_super_admin()
    for lvl, group_name in enumerate(groups):
        # re-build the full group name for this nesting level
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s',
                      lvl, group_name)
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            group.user = owner
            sa.add(group)
            perm_obj = rgm._create_default_perms(group)
            sa.add(perm_obj)
            # flush so the new group gets its id before being used as parent
            sa.flush()

        parent = group
    return group
507 508
508 509
def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """
    maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also checks for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :return: tuple of (added, removed) repository/group names
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.repo_group import RepoGroupModel
    from rhodecode.model.settings import SettingsModel

    sa = meta.Session()
    repo_model = RepoModel()
    user = User.get_first_super_admin()
    added = []

    # creation defaults
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        # ensure all parent groups of this repo exist in the db
        group = map_groups(name)
        unicode_name = safe_unicode(name)
        db_repo = repo_model.get_by_repo_name(unicode_name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now', name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            db_repo = repo_model._create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repo_group=getattr(group, 'group_id', None),
                owner=user,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private,
                state=Repository.STATE_CREATED
            )
            sa.commit()
            # we added that repo just now, and make sure we updated server info
            if db_repo.repo_type == 'git':
                git_repo = db_repo.scm_instance()
                # update repository server-info
                log.debug('Running update server info')
                git_repo._update_server_info()

            db_repo.update_commit_cache()

        config = db_repo._config
        config.set('extensions', 'largefiles', '')
        repo = db_repo.scm_instance(config=config)
        repo.install_hooks()

    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`",
                          repo.repo_name)
                try:
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                    removed.append(repo.repo_name)
                except Exception:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()

        def splitter(full_repo_name):
            # return the parent group portion of a full repo name, or None
            _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
            gr_name = None
            if len(_parts) == 2:
                gr_name = _parts[0]
            return gr_name

        initial_repo_group_list = [splitter(x) for x in
                                   initial_repo_list.keys() if splitter(x)]

        # remove from database those repository groups that are not in the
        # filesystem due to parent child relationships we need to delete them
        # in a specific order of most nested first
        all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
        nested_sort = lambda gr: len(gr.split('/'))
        for group_name in sorted(all_groups, key=nested_sort, reverse=True):
            if group_name not in initial_repo_group_list:
                repo_group = RepoGroup.get_by_group_name(group_name)
                if (repo_group.children.all() or
                        not RepoGroupModel().check_exist_filesystem(
                            group_name=group_name, exc_on_failure=False)):
                    continue

                log.info(
                    'Removing non-existing repository group found in db `%s`',
                    group_name)
                try:
                    RepoGroupModel(sa).delete(group_name, fs_remove=False)
                    sa.commit()
                    removed.append(group_name)
                except Exception:
                    # don't hold further removals on error
                    log.exception(
                        'Unable to remove repository group `%s`',
                        group_name)
                    sa.rollback()
                    raise

    return added, removed
628 629
629 630
def load_rcextensions(root_path):
    """
    Import the optional `rcextensions` package from `root_path` and, when
    present, register it as `rhodecode.EXTENSIONS` and merge its extra
    language mappings into the pygments configuration.
    """
    import rhodecode
    from rhodecode.config import conf

    ext_path = os.path.join(root_path)
    sys.path.append(ext_path)
    try:
        rcextensions = __import__('rcextensions')
    except ImportError:
        log.warn('Unable to load rcextensions from %s', ext_path)
        return

    log.debug('Found rcextensions module loaded %s...', rcextensions)
    rhodecode.EXTENSIONS = rcextensions

    # Additional mappings that are not present in the pygments lexers
    conf.LANGUAGES_EXTENSIONS_MAP.update(
        getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
649 650
650 651
def get_custom_lexer(extension):
    """
    Return a custom lexer when one is registered for `extension` in the
    rcextensions module; otherwise return None.
    """
    import rhodecode
    from pygments import lexers

    # custom override made by RhodeCode
    if extension in ['mako']:
        return lexers.get_lexer_by_name('html+mako')

    # check if this extension was re-mapped to another lexer in rcextensions
    extra_lexers = (
        rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None))
    if extra_lexers and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
        mapped_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
        return lexers.get_lexer_by_name(mapped_name)
668 669
669 670
670 671 #==============================================================================
671 672 # TEST FUNCTIONS AND CREATORS
672 673 #==============================================================================
def create_test_index(repo_location, config):
    """
    Extract the default search index used by the test suite.
    """
    import rc_testdata

    index_dest = os.path.dirname(config['search.location'])
    rc_testdata.extract_search_index('vcs_search_index', index_dest)
681 682
682 683
def create_test_directory(test_path):
    """
    Make sure the directory used by the test suite exists.
    """
    if os.path.isdir(test_path):
        return
    log.debug('Creating testdir %s', test_path)
    os.makedirs(test_path)
690 691
691 692
def create_test_database(test_path, config):
    """
    Makes a fresh database.

    :param test_path: directory the test root paths are derived from
    :param config: app config providing the db url and `here` root
    """
    from rhodecode.lib.db_manage import DbManage

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
                        tests=True, cli_args={'force_ask': True})
    # the bootstrap order below matters: tables first, then settings/users
    dbmanage.create_tables(override=True)
    dbmanage.set_db_version()
    # for tests dynamically set new root paths based on generated content
    dbmanage.create_settings(dbmanage.config_prompt(test_path))
    dbmanage.create_default_user()
    dbmanage.create_test_admin_and_users()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()
713 714
714 715
def create_test_repositories(test_path, config):
    """
    Create the test repositories in the temporary directory. Repositories
    are extracted from archives within the rc_testdata package.
    """
    import rc_testdata
    from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO

    log.debug('making test vcs repositories')

    # wipe any stale search index and cached data first
    for stale_path in (config['search.location'], config['cache_dir']):
        if stale_path and os.path.exists(stale_path):
            log.debug('remove %s', stale_path)
            shutil.rmtree(stale_path)

    rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
    rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))

    # Note: Subversion is in the process of being integrated with the system,
    # until we have a properly packed version of the test svn repository, this
    # tries to copy over the repo from a package "rc_testdata"
    svn_repo_path = rc_testdata.get_svn_repo_archive()
    with tarfile.open(svn_repo_path) as tar:
        tar.extractall(jn(test_path, SVN_REPO))
746 747
747 748
def password_changed(auth_user, session):
    """
    Return True when the password hash stored in the session no longer
    matches the user's current password.

    The default user and anonymous users never report a password change.
    """
    if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
        return False

    current_hash = md5(auth_user.password) if auth_user.password else None
    session_hash = session.get('rhodecode_user', {}).get('password', '')
    return current_hash != session_hash
757 758
758 759
def read_opensource_licenses():
    """
    Return the bundled open-source license data, loading and caching it in
    the module-level `_license_cache` on first use.
    """
    global _license_cache

    if not _license_cache:
        raw = pkg_resources.resource_string(
            'rhodecode', 'config/licenses.json')
        _license_cache = json.loads(raw)

    return _license_cache
768 769
769 770
def generate_platform_uuid():
    """
    Generate a platform UUID derived from the platform name; returns the
    string 'UNDEFINED' when the hash cannot be computed.
    """
    import platform

    try:
        host_parts = [platform.platform()]
        return hashlib.sha256(':'.join(host_parts)).hexdigest()
    except Exception as e:
        log.error('Failed to generate host uuid: %s', e)
        return 'UNDEFINED'
@@ -1,1846 +1,1849 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24 import os
25 25 import re
26 26 import time
27 27 import shutil
28 28 import datetime
29 29 import fnmatch
30 30 import itertools
31 31 import logging
32 32 import collections
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36 from pyramid import compat
37 37
38 38 from rhodecode.translation import lazy_ugettext
39 39 from rhodecode.lib.utils2 import safe_str, safe_unicode
40 40 from rhodecode.lib.vcs import connection
41 41 from rhodecode.lib.vcs.utils import author_name, author_email
42 42 from rhodecode.lib.vcs.conf import settings
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
45 45 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
46 46 NodeDoesNotExistError, NodeNotChangedError, VCSError,
47 47 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
48 48 RepositoryError)
49 49
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 FILEMODE_DEFAULT = 0o100644
55 55 FILEMODE_EXECUTABLE = 0o100755
56 56
57 57 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
58 58
59 59
60 60 class MergeFailureReason(object):
61 61 """
62 62 Enumeration with all the reasons why the server side merge could fail.
63 63
64 64 DO NOT change the number of the reasons, as they may be stored in the
65 65 database.
66 66
67 67 Changing the name of a reason is acceptable and encouraged to deprecate old
68 68 reasons.
69 69 """
70 70
71 71 # Everything went well.
72 72 NONE = 0
73 73
74 74 # An unexpected exception was raised. Check the logs for more details.
75 75 UNKNOWN = 1
76 76
77 77 # The merge was not successful, there are conflicts.
78 78 MERGE_FAILED = 2
79 79
80 80 # The merge succeeded but we could not push it to the target repository.
81 81 PUSH_FAILED = 3
82 82
83 83 # The specified target is not a head in the target repository.
84 84 TARGET_IS_NOT_HEAD = 4
85 85
86 86 # The source repository contains more branches than the target. Pushing
87 87 # the merge will create additional branches in the target.
88 88 HG_SOURCE_HAS_MORE_BRANCHES = 5
89 89
90 90 # The target reference has multiple heads. That does not allow to correctly
91 91 # identify the target location. This could only happen for mercurial
92 92 # branches.
93 93 HG_TARGET_HAS_MULTIPLE_HEADS = 6
94 94
95 95 # The target repository is locked
96 96 TARGET_IS_LOCKED = 7
97 97
98 98 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
99 99 # A involved commit could not be found.
100 100 _DEPRECATED_MISSING_COMMIT = 8
101 101
102 102 # The target repo reference is missing.
103 103 MISSING_TARGET_REF = 9
104 104
105 105 # The source repo reference is missing.
106 106 MISSING_SOURCE_REF = 10
107 107
108 108 # The merge was not successful, there are conflicts related to sub
109 109 # repositories.
110 110 SUBREPO_MERGE_FAILED = 11
111 111
112 112
113 113 class UpdateFailureReason(object):
114 114 """
115 115 Enumeration with all the reasons why the pull request update could fail.
116 116
117 117 DO NOT change the number of the reasons, as they may be stored in the
118 118 database.
119 119
120 120 Changing the name of a reason is acceptable and encouraged to deprecate old
121 121 reasons.
122 122 """
123 123
124 124 # Everything went well.
125 125 NONE = 0
126 126
127 127 # An unexpected exception was raised. Check the logs for more details.
128 128 UNKNOWN = 1
129 129
130 130 # The pull request is up to date.
131 131 NO_CHANGE = 2
132 132
133 133 # The pull request has a reference type that is not supported for update.
134 134 WRONG_REF_TYPE = 3
135 135
136 136 # Update failed because the target reference is missing.
137 137 MISSING_TARGET_REF = 4
138 138
139 139 # Update failed because the source reference is missing.
140 140 MISSING_SOURCE_REF = 5
141 141
142 142
143 143 class MergeResponse(object):
144 144
145 145 # uses .format(**metadata) for variables
146 146 MERGE_STATUS_MESSAGES = {
147 147 MergeFailureReason.NONE: lazy_ugettext(
148 148 u'This pull request can be automatically merged.'),
149 149 MergeFailureReason.UNKNOWN: lazy_ugettext(
150 150 u'This pull request cannot be merged because of an unhandled exception. '
151 151 u'{exception}'),
152 152 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
153 153 u'This pull request cannot be merged because of merge conflicts.'),
154 154 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
155 155 u'This pull request could not be merged because push to '
156 156 u'target:`{target}@{merge_commit}` failed.'),
157 157 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
158 158 u'This pull request cannot be merged because the target '
159 159 u'`{target_ref.name}` is not a head.'),
160 160 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
161 161 u'This pull request cannot be merged because the source contains '
162 162 u'more branches than the target.'),
163 163 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
164 u'This pull request cannot be merged because the target '
164 u'This pull request cannot be merged because the target `{target_ref.name}` '
165 165 u'has multiple heads: `{heads}`.'),
166 166 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
167 167 u'This pull request cannot be merged because the target repository is '
168 168 u'locked by {locked_by}.'),
169 169
170 170 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
171 171 u'This pull request cannot be merged because the target '
172 172 u'reference `{target_ref.name}` is missing.'),
173 173 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
174 174 u'This pull request cannot be merged because the source '
175 175 u'reference `{source_ref.name}` is missing.'),
176 176 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
177 177 u'This pull request cannot be merged because of conflicts related '
178 178 u'to sub repositories.'),
179 179
180 180 # Deprecations
181 181 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
182 182 u'This pull request cannot be merged because the target or the '
183 183 u'source reference is missing.'),
184 184
185 185 }
186 186
187 187 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
188 188 self.possible = possible
189 189 self.executed = executed
190 190 self.merge_ref = merge_ref
191 191 self.failure_reason = failure_reason
192 192 self.metadata = metadata or {}
193 193
194 194 def __repr__(self):
195 195 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
196 196
197 197 def __eq__(self, other):
198 198 same_instance = isinstance(other, self.__class__)
199 199 return same_instance \
200 200 and self.possible == other.possible \
201 201 and self.executed == other.executed \
202 202 and self.failure_reason == other.failure_reason
203 203
204 204 @property
205 205 def label(self):
206 206 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
207 207 not k.startswith('_'))
208 208 return label_dict.get(self.failure_reason)
209 209
210 210 @property
211 211 def merge_status_message(self):
212 212 """
213 213 Return a human friendly error message for the given merge status code.
214 214 """
215 215 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
216 216 try:
217 217 return msg.format(**self.metadata)
218 218 except Exception:
219 219 log.exception('Failed to format %s message', self)
220 220 return msg
221 221
222 222 def asdict(self):
223 223 data = {}
224 224 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
225 225 'merge_status_message']:
226 226 data[k] = getattr(self, k)
227 227 return data
228 228
229 229
230 230 class BaseRepository(object):
231 231 """
232 232 Base Repository for final backends
233 233
234 234 .. attribute:: DEFAULT_BRANCH_NAME
235 235
236 236 name of default branch (i.e. "trunk" for svn, "master" for git etc.
237 237
238 238 .. attribute:: commit_ids
239 239
240 240 list of all available commit ids, in ascending order
241 241
242 242 .. attribute:: path
243 243
244 244 absolute path to the repository
245 245
246 246 .. attribute:: bookmarks
247 247
248 248 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
249 249 there are no bookmarks or the backend implementation does not support
250 250 bookmarks.
251 251
252 252 .. attribute:: tags
253 253
254 254 Mapping from name to :term:`Commit ID` of the tag.
255 255
256 256 """
257 257
258 258 DEFAULT_BRANCH_NAME = None
259 259 DEFAULT_CONTACT = u"Unknown"
260 260 DEFAULT_DESCRIPTION = u"unknown"
261 261 EMPTY_COMMIT_ID = '0' * 40
262 262
263 263 path = None
264 264
265 265 def __init__(self, repo_path, config=None, create=False, **kwargs):
266 266 """
267 267 Initializes repository. Raises RepositoryError if repository could
268 268 not be find at the given ``repo_path`` or directory at ``repo_path``
269 269 exists and ``create`` is set to True.
270 270
271 271 :param repo_path: local path of the repository
272 272 :param config: repository configuration
273 273 :param create=False: if set to True, would try to create repository.
274 274 :param src_url=None: if set, should be proper url from which repository
275 275 would be cloned; requires ``create`` parameter to be set to True -
276 276 raises RepositoryError if src_url is set and create evaluates to
277 277 False
278 278 """
279 279 raise NotImplementedError
280 280
281 281 def __repr__(self):
282 282 return '<%s at %s>' % (self.__class__.__name__, self.path)
283 283
284 284 def __len__(self):
285 285 return self.count()
286 286
287 287 def __eq__(self, other):
288 288 same_instance = isinstance(other, self.__class__)
289 289 return same_instance and other.path == self.path
290 290
291 291 def __ne__(self, other):
292 292 return not self.__eq__(other)
293 293
294 294 def get_create_shadow_cache_pr_path(self, db_repo):
295 295 path = db_repo.cached_diffs_dir
296 296 if not os.path.exists(path):
297 297 os.makedirs(path, 0o755)
298 298 return path
299 299
300 300 @classmethod
301 301 def get_default_config(cls, default=None):
302 302 config = Config()
303 303 if default and isinstance(default, list):
304 304 for section, key, val in default:
305 305 config.set(section, key, val)
306 306 return config
307 307
308 308 @LazyProperty
309 309 def _remote(self):
310 310 raise NotImplementedError
311 311
312 def _heads(self, branch=None):
313 return []
314
312 315 @LazyProperty
313 316 def EMPTY_COMMIT(self):
314 317 return EmptyCommit(self.EMPTY_COMMIT_ID)
315 318
316 319 @LazyProperty
317 320 def alias(self):
318 321 for k, v in settings.BACKENDS.items():
319 322 if v.split('.')[-1] == str(self.__class__.__name__):
320 323 return k
321 324
322 325 @LazyProperty
323 326 def name(self):
324 327 return safe_unicode(os.path.basename(self.path))
325 328
326 329 @LazyProperty
327 330 def description(self):
328 331 raise NotImplementedError
329 332
330 333 def refs(self):
331 334 """
332 335 returns a `dict` with branches, bookmarks, tags, and closed_branches
333 336 for this repository
334 337 """
335 338 return dict(
336 339 branches=self.branches,
337 340 branches_closed=self.branches_closed,
338 341 tags=self.tags,
339 342 bookmarks=self.bookmarks
340 343 )
341 344
342 345 @LazyProperty
343 346 def branches(self):
344 347 """
345 348 A `dict` which maps branch names to commit ids.
346 349 """
347 350 raise NotImplementedError
348 351
349 352 @LazyProperty
350 353 def branches_closed(self):
351 354 """
352 355 A `dict` which maps tags names to commit ids.
353 356 """
354 357 raise NotImplementedError
355 358
356 359 @LazyProperty
357 360 def bookmarks(self):
358 361 """
359 362 A `dict` which maps tags names to commit ids.
360 363 """
361 364 raise NotImplementedError
362 365
363 366 @LazyProperty
364 367 def tags(self):
365 368 """
366 369 A `dict` which maps tags names to commit ids.
367 370 """
368 371 raise NotImplementedError
369 372
370 373 @LazyProperty
371 374 def size(self):
372 375 """
373 376 Returns combined size in bytes for all repository files
374 377 """
375 378 tip = self.get_commit()
376 379 return tip.size
377 380
378 381 def size_at_commit(self, commit_id):
379 382 commit = self.get_commit(commit_id)
380 383 return commit.size
381 384
382 385 def is_empty(self):
383 386 return not bool(self.commit_ids)
384 387
385 388 @staticmethod
386 389 def check_url(url, config):
387 390 """
388 391 Function will check given url and try to verify if it's a valid
389 392 link.
390 393 """
391 394 raise NotImplementedError
392 395
393 396 @staticmethod
394 397 def is_valid_repository(path):
395 398 """
396 399 Check if given `path` contains a valid repository of this backend
397 400 """
398 401 raise NotImplementedError
399 402
400 403 # ==========================================================================
401 404 # COMMITS
402 405 # ==========================================================================
403 406
404 407 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
405 408 """
406 409 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
407 410 are both None, most recent commit is returned.
408 411
409 412 :param pre_load: Optional. List of commit attributes to load.
410 413
411 414 :raises ``EmptyRepositoryError``: if there are no commits
412 415 """
413 416 raise NotImplementedError
414 417
415 418 def __iter__(self):
416 419 for commit_id in self.commit_ids:
417 420 yield self.get_commit(commit_id=commit_id)
418 421
419 422 def get_commits(
420 423 self, start_id=None, end_id=None, start_date=None, end_date=None,
421 424 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
422 425 """
423 426 Returns iterator of `BaseCommit` objects from start to end
424 427 not inclusive. This should behave just like a list, ie. end is not
425 428 inclusive.
426 429
427 430 :param start_id: None or str, must be a valid commit id
428 431 :param end_id: None or str, must be a valid commit id
429 432 :param start_date:
430 433 :param end_date:
431 434 :param branch_name:
432 435 :param show_hidden:
433 436 :param pre_load:
434 437 :param translate_tags:
435 438 """
436 439 raise NotImplementedError
437 440
438 441 def __getitem__(self, key):
439 442 """
440 443 Allows index based access to the commit objects of this repository.
441 444 """
442 445 pre_load = ["author", "branch", "date", "message", "parents"]
443 446 if isinstance(key, slice):
444 447 return self._get_range(key, pre_load)
445 448 return self.get_commit(commit_idx=key, pre_load=pre_load)
446 449
447 450 def _get_range(self, slice_obj, pre_load):
448 451 for commit_id in self.commit_ids.__getitem__(slice_obj):
449 452 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
450 453
451 454 def count(self):
452 455 return len(self.commit_ids)
453 456
454 457 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
455 458 """
456 459 Creates and returns a tag for the given ``commit_id``.
457 460
458 461 :param name: name for new tag
459 462 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
460 463 :param commit_id: commit id for which new tag would be created
461 464 :param message: message of the tag's commit
462 465 :param date: date of tag's commit
463 466
464 467 :raises TagAlreadyExistError: if tag with same name already exists
465 468 """
466 469 raise NotImplementedError
467 470
468 471 def remove_tag(self, name, user, message=None, date=None):
469 472 """
470 473 Removes tag with the given ``name``.
471 474
472 475 :param name: name of the tag to be removed
473 476 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
474 477 :param message: message of the tag's removal commit
475 478 :param date: date of tag's removal commit
476 479
477 480 :raises TagDoesNotExistError: if tag with given name does not exists
478 481 """
479 482 raise NotImplementedError
480 483
481 484 def get_diff(
482 485 self, commit1, commit2, path=None, ignore_whitespace=False,
483 486 context=3, path1=None):
484 487 """
485 488 Returns (git like) *diff*, as plain text. Shows changes introduced by
486 489 `commit2` since `commit1`.
487 490
488 491 :param commit1: Entry point from which diff is shown. Can be
489 492 ``self.EMPTY_COMMIT`` - in this case, patch showing all
490 493 the changes since empty state of the repository until `commit2`
491 494 :param commit2: Until which commit changes should be shown.
492 495 :param path: Can be set to a path of a file to create a diff of that
493 496 file. If `path1` is also set, this value is only associated to
494 497 `commit2`.
495 498 :param ignore_whitespace: If set to ``True``, would not show whitespace
496 499 changes. Defaults to ``False``.
497 500 :param context: How many lines before/after changed lines should be
498 501 shown. Defaults to ``3``.
499 502 :param path1: Can be set to a path to associate with `commit1`. This
500 503 parameter works only for backends which support diff generation for
501 504 different paths. Other backends will raise a `ValueError` if `path1`
502 505 is set and has a different value than `path`.
503 506 :param file_path: filter this diff by given path pattern
504 507 """
505 508 raise NotImplementedError
506 509
507 510 def strip(self, commit_id, branch=None):
508 511 """
509 512 Strip given commit_id from the repository
510 513 """
511 514 raise NotImplementedError
512 515
513 516 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
514 517 """
515 518 Return a latest common ancestor commit if one exists for this repo
516 519 `commit_id1` vs `commit_id2` from `repo2`.
517 520
518 521 :param commit_id1: Commit it from this repository to use as a
519 522 target for the comparison.
520 523 :param commit_id2: Source commit id to use for comparison.
521 524 :param repo2: Source repository to use for comparison.
522 525 """
523 526 raise NotImplementedError
524 527
525 528 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
526 529 """
527 530 Compare this repository's revision `commit_id1` with `commit_id2`.
528 531
529 532 Returns a tuple(commits, ancestor) that would be merged from
530 533 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
531 534 will be returned as ancestor.
532 535
533 536 :param commit_id1: Commit it from this repository to use as a
534 537 target for the comparison.
535 538 :param commit_id2: Source commit id to use for comparison.
536 539 :param repo2: Source repository to use for comparison.
537 540 :param merge: If set to ``True`` will do a merge compare which also
538 541 returns the common ancestor.
539 542 :param pre_load: Optional. List of commit attributes to load.
540 543 """
541 544 raise NotImplementedError
542 545
543 546 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
544 547 user_name='', user_email='', message='', dry_run=False,
545 548 use_rebase=False, close_branch=False):
546 549 """
547 550 Merge the revisions specified in `source_ref` from `source_repo`
548 551 onto the `target_ref` of this repository.
549 552
550 553 `source_ref` and `target_ref` are named tupls with the following
551 554 fields `type`, `name` and `commit_id`.
552 555
553 556 Returns a MergeResponse named tuple with the following fields
554 557 'possible', 'executed', 'source_commit', 'target_commit',
555 558 'merge_commit'.
556 559
557 560 :param repo_id: `repo_id` target repo id.
558 561 :param workspace_id: `workspace_id` unique identifier.
559 562 :param target_ref: `target_ref` points to the commit on top of which
560 563 the `source_ref` should be merged.
561 564 :param source_repo: The repository that contains the commits to be
562 565 merged.
563 566 :param source_ref: `source_ref` points to the topmost commit from
564 567 the `source_repo` which should be merged.
565 568 :param user_name: Merge commit `user_name`.
566 569 :param user_email: Merge commit `user_email`.
567 570 :param message: Merge commit `message`.
568 571 :param dry_run: If `True` the merge will not take place.
569 572 :param use_rebase: If `True` commits from the source will be rebased
570 573 on top of the target instead of being merged.
571 574 :param close_branch: If `True` branch will be close before merging it
572 575 """
573 576 if dry_run:
574 577 message = message or settings.MERGE_DRY_RUN_MESSAGE
575 578 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
576 579 user_name = user_name or settings.MERGE_DRY_RUN_USER
577 580 else:
578 581 if not user_name:
579 582 raise ValueError('user_name cannot be empty')
580 583 if not user_email:
581 584 raise ValueError('user_email cannot be empty')
582 585 if not message:
583 586 raise ValueError('message cannot be empty')
584 587
585 588 try:
586 589 return self._merge_repo(
587 590 repo_id, workspace_id, target_ref, source_repo,
588 591 source_ref, message, user_name, user_email, dry_run=dry_run,
589 592 use_rebase=use_rebase, close_branch=close_branch)
590 593 except RepositoryError as exc:
591 594 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
592 595 return MergeResponse(
593 596 False, False, None, MergeFailureReason.UNKNOWN,
594 597 metadata={'exception': str(exc)})
595 598
596 599 def _merge_repo(self, repo_id, workspace_id, target_ref,
597 600 source_repo, source_ref, merge_message,
598 601 merger_name, merger_email, dry_run=False,
599 602 use_rebase=False, close_branch=False):
600 603 """Internal implementation of merge."""
601 604 raise NotImplementedError
602 605
603 606 def _maybe_prepare_merge_workspace(
604 607 self, repo_id, workspace_id, target_ref, source_ref):
605 608 """
606 609 Create the merge workspace.
607 610
608 611 :param workspace_id: `workspace_id` unique identifier.
609 612 """
610 613 raise NotImplementedError
611 614
612 615 def _get_legacy_shadow_repository_path(self, workspace_id):
613 616 """
614 617 Legacy version that was used before. We still need it for
615 618 backward compat
616 619 """
617 620 return os.path.join(
618 621 os.path.dirname(self.path),
619 622 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
620 623
621 624 def _get_shadow_repository_path(self, repo_id, workspace_id):
622 625 # The name of the shadow repository must start with '.', so it is
623 626 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
624 627 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
625 628 if os.path.exists(legacy_repository_path):
626 629 return legacy_repository_path
627 630 else:
628 631 return os.path.join(
629 632 os.path.dirname(self.path),
630 633 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
631 634
632 635 def cleanup_merge_workspace(self, repo_id, workspace_id):
633 636 """
634 637 Remove merge workspace.
635 638
636 639 This function MUST not fail in case there is no workspace associated to
637 640 the given `workspace_id`.
638 641
639 642 :param workspace_id: `workspace_id` unique identifier.
640 643 """
641 644 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
642 645 shadow_repository_path_del = '{}.{}.delete'.format(
643 646 shadow_repository_path, time.time())
644 647
645 648 # move the shadow repo, so it never conflicts with the one used.
646 649 # we use this method because shutil.rmtree had some edge case problems
647 650 # removing symlinked repositories
648 651 if not os.path.isdir(shadow_repository_path):
649 652 return
650 653
651 654 shutil.move(shadow_repository_path, shadow_repository_path_del)
652 655 try:
653 656 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
654 657 except Exception:
655 658 log.exception('Failed to gracefully remove shadow repo under %s',
656 659 shadow_repository_path_del)
657 660 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
658 661
659 662 # ========== #
660 663 # COMMIT API #
661 664 # ========== #
662 665
663 666 @LazyProperty
664 667 def in_memory_commit(self):
665 668 """
666 669 Returns :class:`InMemoryCommit` object for this repository.
667 670 """
668 671 raise NotImplementedError
669 672
670 673 # ======================== #
671 674 # UTILITIES FOR SUBCLASSES #
672 675 # ======================== #
673 676
674 677 def _validate_diff_commits(self, commit1, commit2):
675 678 """
676 679 Validates that the given commits are related to this repository.
677 680
678 681 Intended as a utility for sub classes to have a consistent validation
679 682 of input parameters in methods like :meth:`get_diff`.
680 683 """
681 684 self._validate_commit(commit1)
682 685 self._validate_commit(commit2)
683 686 if (isinstance(commit1, EmptyCommit) and
684 687 isinstance(commit2, EmptyCommit)):
685 688 raise ValueError("Cannot compare two empty commits")
686 689
687 690 def _validate_commit(self, commit):
688 691 if not isinstance(commit, BaseCommit):
689 692 raise TypeError(
690 693 "%s is not of type BaseCommit" % repr(commit))
691 694 if commit.repository != self and not isinstance(commit, EmptyCommit):
692 695 raise ValueError(
693 696 "Commit %s must be a valid commit from this repository %s, "
694 697 "related to this repository instead %s." %
695 698 (commit, self, commit.repository))
696 699
697 700 def _validate_commit_id(self, commit_id):
698 701 if not isinstance(commit_id, compat.string_types):
699 702 raise TypeError("commit_id must be a string value")
700 703
701 704 def _validate_commit_idx(self, commit_idx):
702 705 if not isinstance(commit_idx, (int, long)):
703 706 raise TypeError("commit_idx must be a numeric value")
704 707
705 708 def _validate_branch_name(self, branch_name):
706 709 if branch_name and branch_name not in self.branches_all:
707 710 msg = ("Branch %s not found in %s" % (branch_name, self))
708 711 raise BranchDoesNotExistError(msg)
709 712
710 713 #
711 714 # Supporting deprecated API parts
712 715 # TODO: johbo: consider to move this into a mixin
713 716 #
714 717
715 718 @property
716 719 def EMPTY_CHANGESET(self):
717 720 warnings.warn(
718 721 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
719 722 return self.EMPTY_COMMIT_ID
720 723
721 724 @property
722 725 def revisions(self):
723 726 warnings.warn("Use commits attribute instead", DeprecationWarning)
724 727 return self.commit_ids
725 728
726 729 @revisions.setter
727 730 def revisions(self, value):
728 731 warnings.warn("Use commits attribute instead", DeprecationWarning)
729 732 self.commit_ids = value
730 733
731 734 def get_changeset(self, revision=None, pre_load=None):
732 735 warnings.warn("Use get_commit instead", DeprecationWarning)
733 736 commit_id = None
734 737 commit_idx = None
735 738 if isinstance(revision, compat.string_types):
736 739 commit_id = revision
737 740 else:
738 741 commit_idx = revision
739 742 return self.get_commit(
740 743 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
741 744
742 745 def get_changesets(
743 746 self, start=None, end=None, start_date=None, end_date=None,
744 747 branch_name=None, pre_load=None):
745 748 warnings.warn("Use get_commits instead", DeprecationWarning)
746 749 start_id = self._revision_to_commit(start)
747 750 end_id = self._revision_to_commit(end)
748 751 return self.get_commits(
749 752 start_id=start_id, end_id=end_id, start_date=start_date,
750 753 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
751 754
752 755 def _revision_to_commit(self, revision):
753 756 """
754 757 Translates a revision to a commit_id
755 758
756 759 Helps to support the old changeset based API which allows to use
757 760 commit ids and commit indices interchangeable.
758 761 """
759 762 if revision is None:
760 763 return revision
761 764
762 765 if isinstance(revision, compat.string_types):
763 766 commit_id = revision
764 767 else:
765 768 commit_id = self.commit_ids[revision]
766 769 return commit_id
767 770
768 771 @property
769 772 def in_memory_changeset(self):
770 773 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
771 774 return self.in_memory_commit
772 775
773 776 def get_path_permissions(self, username):
774 777 """
775 778 Returns a path permission checker or None if not supported
776 779
777 780 :param username: session user name
778 781 :return: an instance of BasePathPermissionChecker or None
779 782 """
780 783 return None
781 784
782 785 def install_hooks(self, force=False):
783 786 return self._remote.install_hooks(force)
784 787
785 788 def get_hooks_info(self):
786 789 return self._remote.get_hooks_info()
787 790
788 791
789 792 class BaseCommit(object):
790 793 """
791 794 Each backend should implement it's commit representation.
792 795
793 796 **Attributes**
794 797
795 798 ``repository``
796 799 repository object within which commit exists
797 800
798 801 ``id``
799 802 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
800 803 just ``tip``.
801 804
802 805 ``raw_id``
803 806 raw commit representation (i.e. full 40 length sha for git
804 807 backend)
805 808
806 809 ``short_id``
807 810 shortened (if apply) version of ``raw_id``; it would be simple
808 811 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
809 812 as ``raw_id`` for subversion
810 813
811 814 ``idx``
812 815 commit index
813 816
814 817 ``files``
815 818 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
816 819
817 820 ``dirs``
818 821 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
819 822
820 823 ``nodes``
821 824 combined list of ``Node`` objects
822 825
823 826 ``author``
824 827 author of the commit, as unicode
825 828
826 829 ``message``
827 830 message of the commit, as unicode
828 831
829 832 ``parents``
830 833 list of parent commits
831 834
832 835 """
833 836
834 837 branch = None
835 838 """
836 839 Depending on the backend this should be set to the branch name of the
837 840 commit. Backends not supporting branches on commits should leave this
838 841 value as ``None``.
839 842 """
840 843
841 844 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
842 845 """
843 846 This template is used to generate a default prefix for repository archives
844 847 if no prefix has been specified.
845 848 """
846 849
847 850 def __str__(self):
848 851 return '<%s at %s:%s>' % (
849 852 self.__class__.__name__, self.idx, self.short_id)
850 853
851 854 def __repr__(self):
852 855 return self.__str__()
853 856
854 857 def __unicode__(self):
855 858 return u'%s:%s' % (self.idx, self.short_id)
856 859
857 860 def __eq__(self, other):
858 861 same_instance = isinstance(other, self.__class__)
859 862 return same_instance and self.raw_id == other.raw_id
860 863
    def __json__(self):
        """Returns a JSON-serializable dict describing this commit."""
        parents = []
        try:
            for parent in self.parents:
                parents.append({'raw_id': parent.raw_id})
        except NotImplementedError:
            # empty commit doesn't have parents implemented
            pass

        return {
            'short_id': self.short_id,
            'raw_id': self.raw_id,
            'revision': self.idx,  # deprecated alias of idx kept for API compat
            'message': self.message,
            'date': self.date,
            'author': self.author,
            'parents': parents,
            'branch': self.branch
        }
880 883
    def __getstate__(self):
        """Pickle support: drop the unpicklable remote/repository handles."""
        d = self.__dict__.copy()
        d.pop('_remote', None)
        d.pop('repository', None)
        return d
886 889
    def _get_refs(self):
        """Returns the refs (branches/bookmarks/tags) pointing at this commit."""
        return {
            'branches': [self.branch] if self.branch else [],
            # only Mercurial commits carry bookmarks; default to empty
            'bookmarks': getattr(self, 'bookmarks', []),
            'tags': self.tags
        }
893 896
    @LazyProperty
    def last(self):
        """
        ``True`` if this is last commit in repository, ``False``
        otherwise; trying to access this attribute while there is no
        commits would raise `EmptyRepositoryError`

        :raises CommitError: if the commit is not attached to a repository.
        """
        if self.repository is None:
            raise CommitError("Cannot check if it's most recent commit")
        return self.raw_id == self.repository.commit_ids[-1]
904 907
    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.

        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
911 914
    @LazyProperty
    def first_parent(self):
        """
        Returns the first parent commit, or an ``EmptyCommit`` when this
        commit has no parents.
        """
        return self.parents[0] if self.parents else EmptyCommit()
918 921
    @property
    def merge(self):
        """
        Returns boolean if commit is a merge (i.e. has more than one parent).
        """
        return len(self.parents) > 1
925 928
    @LazyProperty
    def children(self):
        """
        Returns list of child commits.

        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
932 935
    @LazyProperty
    def id(self):
        """
        Returns string identifying this commit.

        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
939 942
    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit.

        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
946 949
    @LazyProperty
    def short_id(self):
        """
        Returns shortened version of ``raw_id`` attribute, as string,
        identifying this commit, useful for presentation to users.

        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
954 957
    @LazyProperty
    def idx(self):
        """
        Returns integer identifying this commit.

        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
961 964
    @LazyProperty
    def committer(self):
        """
        Returns committer for this commit

        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
968 971
    @LazyProperty
    def committer_name(self):
        """
        Returns committer name for this commit.

        Delegates to the module-level ``author_name`` helper, which
        presumably extracts the name part of "Name <email>" -- TODO confirm.
        """

        return author_name(self.committer)
976 979
    @LazyProperty
    def committer_email(self):
        """
        Returns committer email address for this commit, extracted by the
        module-level ``author_email`` helper.
        """

        return author_email(self.committer)
984 987
    @LazyProperty
    def author(self):
        """
        Returns author for this commit

        :raises NotImplementedError: must be implemented by backend subclasses.
        """

        raise NotImplementedError
992 995
    @LazyProperty
    def author_name(self):
        """
        Returns author name for this commit, extracted by the module-level
        ``author_name`` helper.
        """

        return author_name(self.author)
1000 1003
    @LazyProperty
    def author_email(self):
        """
        Returns author email address for this commit, extracted by the
        module-level ``author_email`` helper.
        """

        return author_email(self.author)
1008 1011
    def get_file_mode(self, path):
        """
        Returns stat mode of the file at `path`.

        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
1014 1017
    def is_link(self, path):
        """
        Returns ``True`` if given `path` is a symlink

        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
1020 1023
    def get_file_content(self, path):
        """
        Returns content of the file at the given `path`.

        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
1026 1029
    def get_file_size(self, path):
        """
        Returns size of the file at the given `path`.

        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
1032 1035
1033 1036 def get_path_commit(self, path, pre_load=None):
1034 1037 """
1035 1038 Returns last commit of the file at the given `path`.
1036 1039
1037 1040 :param pre_load: Optional. List of commit attributes to load.
1038 1041 """
1039 1042 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1040 1043 if not commits:
1041 1044 raise RepositoryError(
1042 1045 'Failed to fetch history for path {}. '
1043 1046 'Please check if such path exists in your repository'.format(
1044 1047 path))
1045 1048 return commits[0]
1046 1049
    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of :class:`BaseCommit`
        objects for which file at given `path` has been modified.

        :param limit: Optional. Allows to limit the size of the returned
            history. This is intended as a hint to the underlying backend, so
            that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.
        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
1058 1061
    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, sha, commit lazy loader and line

        :param pre_load: Optional. List of commit attributes to load.
        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
1067 1070
    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``.

        :raises ``CommitError``: if node at the given ``path`` is not
            instance of ``DirNode``
        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
1077 1080
    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
            ``path``
        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
1086 1089
    def get_largefile_node(self, path):
        """
        Returns the path to largefile from Mercurial/Git-lfs storage.
        or None if it's not a largefile node
        """
        # Default implementation: backends without largefile support
        # simply report "not a largefile".
        return None
1093 1096
    def archive_repo(self, file_path, kind='tgz', subrepos=None,
                     prefix=None, write_metadata=False, mtime=None):
        """
        Creates an archive containing the contents of the repository.

        :param file_path: path to the file which to create the archive.
        :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
        :param subrepos: currently unused by this implementation --
            TODO confirm whether backends honour it.
        :param prefix: name of root directory in archive.
            Default is repository name and commit's short_id joined with dash:
            ``"{repo_name}-{short_id}"``.
        :param write_metadata: write a metadata file into archive.
        :param mtime: custom modification time for archive creation, defaults
            to time.time() if not given.

        :raise VCSError: If prefix has a problem.
        """
        allowed_kinds = settings.ARCHIVE_SPECS.keys()
        if kind not in allowed_kinds:
            raise ImproperArchiveTypeError(
                'Archive kind (%s) not supported use one of %s' %
                (kind, allowed_kinds))

        prefix = self._validate_archive_prefix(prefix)

        mtime = mtime or time.mktime(self.date.timetuple())

        # Collect (path, mode, is_link, content) for every file reachable
        # from the repository root at this commit.
        file_info = []
        cur_rev = self.repository.get_commit(commit_id=self.raw_id)
        for _r, _d, files in cur_rev.walk('/'):
            for f in files:
                f_path = os.path.join(prefix, f.path)
                file_info.append(
                    (f_path, f.mode, f.is_link(), f.raw_bytes))

        if write_metadata:
            metadata = [
                ('repo_name', self.repository.name),
                ('rev', self.raw_id),
                ('create_time', mtime),
                ('branch', self.branch),
                ('tags', ','.join(self.tags)),
            ]
            meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
            file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))

        # NOTE(review): archive creation is delegated to the Hg connection
        # regardless of this commit's backend -- confirm this is intentional.
        connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1140 1143
    def _validate_archive_prefix(self, prefix):
        """
        Returns a validated archive prefix, deriving the default
        ``{repo_name}-{short_id}`` one when ``prefix`` is None.

        :raises ValueError: if ``prefix`` is not a str (== bytes on Python 2).
        :raises VCSError: if ``prefix`` starts with a slash or is empty.
        """
        if prefix is None:
            prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
                repo_name=safe_str(self.repository.name),
                short_id=self.short_id)
        elif not isinstance(prefix, str):
            # NOTE(review): under Python 2 ``str`` is bytes, hence the
            # message wording; revisit when porting to Python 3.
            raise ValueError("prefix not a bytes object: %s" % repr(prefix))
        elif prefix.startswith('/'):
            raise VCSError("Prefix cannot start with leading slash")
        elif prefix.strip() == '':
            raise VCSError("Prefix cannot be empty")
        return prefix
1153 1156
    @LazyProperty
    def root(self):
        """
        Returns ``RootNode`` object for this commit.
        """
        # Empty path resolves to the repository root node.
        return self.get_node('')
1160 1163
    def next(self, branch=None):
        """
        Returns next commit from current, if branch is gives it will return
        next commit belonging to this branch

        :param branch: show commits within the given named branch
        """
        # Python 2 xrange: scan forward from the commit after this one.
        indexes = xrange(self.idx + 1, self.repository.count())
        return self._find_next(indexes, branch)
1170 1173
    def prev(self, branch=None):
        """
        Returns previous commit from current, if branch is gives it will
        return previous commit belonging to this branch

        :param branch: show commit within the given named branch
        """
        # Scan backwards from the commit before this one down to index 0.
        indexes = xrange(self.idx - 1, -1, -1)
        return self._find_next(indexes, branch)
1180 1183
    def _find_next(self, indexes, branch=None):
        """
        Returns the first commit from ``indexes`` (optionally restricted to
        ``branch``).

        :raises VCSError: if ``branch`` is given but this commit is not on it.
        :raises CommitDoesNotExistError: when no matching commit is found.
        """
        if branch and self.branch != branch:
            raise VCSError('Branch option used on commit not belonging '
                           'to that branch')

        for next_idx in indexes:
            commit = self.repository.get_commit(commit_idx=next_idx)
            if branch and branch != commit.branch:
                continue
            return commit
        raise CommitDoesNotExistError
1192 1195
    def diff(self, ignore_whitespace=True, context=3):
        """
        Returns a `Diff` object representing the change made by this commit.

        The diff is computed against the first parent (an ``EmptyCommit``
        for a root commit).
        """
        parent = self.first_parent
        diff = self.repository.get_diff(
            parent, self,
            ignore_whitespace=ignore_whitespace,
            context=context)
        return diff
1203 1206
    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.

        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
1210 1213
    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.

        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
1217 1220
    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.

        :raises NotImplementedError: must be implemented by backend subclasses.
        """
        raise NotImplementedError
1224 1227
1225 1228 @LazyProperty
1226 1229 def size(self):
1227 1230 """
1228 1231 Returns total number of bytes from contents of all filenodes.
1229 1232 """
1230 1233 return sum((node.size for node in self.get_filenodes_generator()))
1231 1234
    def walk(self, topurl=''):
        """
        Similar to os.walk method. Instead of filesystem it walks through
        commit starting at given ``topurl``. Returns generator of tuples
        (topnode, dirnodes, filenodes).
        """
        topnode = self.get_node(topurl)
        if not topnode.is_dir():
            # Walking a file path yields nothing.
            return
        yield (topnode, topnode.dirs, topnode.files)
        # Recurse depth-first into every sub-directory.
        for dirnode in topnode.dirs:
            for tup in self.walk(dirnode.path):
                yield tup
1245 1248
1246 1249 def get_filenodes_generator(self):
1247 1250 """
1248 1251 Returns generator that yields *all* file nodes.
1249 1252 """
1250 1253 for topnode, dirs, files in self.walk():
1251 1254 for node in files:
1252 1255 yield node
1253 1256
1254 1257 #
1255 1258 # Utilities for sub classes to support consistent behavior
1256 1259 #
1257 1260
    def no_node_at_path(self, path):
        """
        Returns (does not raise) a ``NodeDoesNotExistError`` describing a
        missing ``path`` at this commit, for subclasses to raise consistently.
        """
        return NodeDoesNotExistError(
            u"There is no file nor directory at the given path: "
            u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1262 1265
    def _fix_path(self, path):
        """
        Paths are stored without trailing slash so we need to get rid off it if
        needed.
        """
        return path.rstrip('/')
1269 1272
1270 1273 #
1271 1274 # Deprecated API based on changesets
1272 1275 #
1273 1276
    @property
    def revision(self):
        """Deprecated alias for :attr:`idx` (changeset-era API)."""
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx
1278 1281
    @revision.setter
    def revision(self, value):
        # Deprecated setter kept for the old changeset API.
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1283 1286
    def get_file_changeset(self, path):
        """Deprecated alias for :meth:`get_path_commit`."""
        warnings.warn("Use get_path_commit instead", DeprecationWarning)
        return self.get_path_commit(path)
1287 1290
1288 1291
class BaseChangesetClass(type):
    """
    Metaclass making ``isinstance(x, BaseChangeset)`` succeed for any
    :class:`BaseCommit`, so code using the deprecated changeset API keeps
    working.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1293 1296
1294 1297
class BaseChangeset(BaseCommit):
    """
    Deprecated backward-compatibility alias for :class:`BaseCommit`.
    """

    # Python 2 style metaclass declaration.
    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1303 1306
1304 1307
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
            latest commit
        :raises ``NodeAlreadyAddedError``: if node with same path is already
            marked as *added*
        """
        # Check if not already marked as *added* first
        for node in filenodes:
            if node.path in (n.path for n in self.added):
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        for node in filenodes:
            self.added.append(node)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
            marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
            marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
            commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        try:
            # Probing for the latest commit; an empty repository has
            # nothing that could be *changed*.
            self.repository.get_commit()
        except EmptyRepositoryError:
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
            be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
            be *changed*
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node is already marked to for removal at %s" % node.path)
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        for node in itertools.chain(self.added, self.changed, self.removed):
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
            ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    # Default to the repository head as single parent.
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, p))

        # Check nodes marked as changed
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # NOTE(review): ``node`` here is the last loop variable from the
            # scan above, not necessarily one of the missing nodes.
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (node.path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(
            self, message, author, parents=None, branch=None, date=None,
            **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`.

        .. note::

            While overriding this method each backend should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
            branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
1533 1536
1534 1537
class BaseInMemoryChangesetClass(type):
    """
    Metaclass making ``isinstance(x, BaseInMemoryChangeset)`` succeed for
    any :class:`BaseInMemoryCommit`, preserving the deprecated API.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseInMemoryCommit)
1539 1542
1540 1543
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """
    Deprecated backward-compatibility alias for
    :class:`BaseInMemoryCommit`.
    """

    __metaclass__ = BaseInMemoryChangesetClass

    def __new__(cls, *args, **kwargs):
        # Fixed deprecation message: it previously read "Use BaseCommit
        # instead of BaseInMemoryCommit", naming the wrong pair of classes
        # (copy-paste from BaseChangeset).
        warnings.warn(
            "Use BaseInMemoryCommit instead of BaseInMemoryChangeset",
            DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1549 1552
1550 1553
class EmptyCommit(BaseCommit):
    """
    A dummy empty commit, used as a stand-in where a commit object is
    expected but the repository has none. It's possible to pass a hash
    when creating an EmptyCommit.
    """

    def __init__(
            self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit, useful for web
        representation.
        """

        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # Resolve the backend's default branch name (e.g. "default" for hg)
        # when an alias was provided; otherwise None.
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_path_commit(self, path, pre_load=None):
        # Fixed: signature now matches BaseCommit.get_path_commit(path,
        # pre_load=None); callers passing pre_load previously got a
        # TypeError. The argument is ignored -- an empty commit is its
        # own history.
        return self

    def get_file_content(self, path):
        # No files exist in an empty commit.
        return u''

    def get_file_size(self, path):
        return 0
1601 1604
1602 1605
class EmptyChangesetClass(type):
    """
    Metaclass making ``isinstance(x, EmptyChangeset)`` succeed for any
    :class:`EmptyCommit`, preserving the deprecated changeset API.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, EmptyCommit)
1607 1610
1608 1611
class EmptyChangeset(EmptyCommit):
    """
    Deprecated backward-compatibility alias for :class:`EmptyCommit`.
    """

    __metaclass__ = EmptyChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # Fixed: previously called ``super(EmptyCommit, cls).__new__`` which
        # skipped the EmptyCommit level of the MRO; it only worked because
        # EmptyCommit defines no __new__ of its own.
        return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        """Deprecated alias for :attr:`idx`."""
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1637 1640
1638 1641
class EmptyRepository(BaseRepository):
    """Null-object repository used where a real backend is not available."""

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        pass

    def get_diff(self, *args, **kwargs):
        # An empty repository can only ever produce an empty diff.
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff('')
1646 1649
1647 1650
class CollectionGenerator(object):
    """
    Lazy collection of commits: stores only commit ids and materializes
    commit objects on iteration via the repository's ``get_commit``.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # TODO: (oliver) this isn't currently hooked up
        self.collection_size = None
        self.pre_load = pre_load
        self.translate_tag = translate_tag

    def __len__(self):
        # ``collection_size`` is always None for now (see __init__), so this
        # falls through to the number of stored commit ids.
        if self.collection_size is not None:
            return self.collection_size
        return len(self.commit_ids)

    def __iter__(self):
        # TODO: johbo: Mercurial passes in commit indices or commit ids
        return (self._commit_factory(commit_id)
                for commit_id in self.commit_ids)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __getslice__(self, i, j):
        """
        Returns a new generator over the ``[i:j]`` slice of commit ids
        (Python 2 slicing protocol).
        """
        sliced_ids = self.commit_ids[i:j]
        return self.__class__(
            self.repo, sliced_ids, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (len(self))
1687 1690
1688 1691
class Config(object):
    """
    Holds per-repository configuration values.

    Mirrors a small subset of the :class:`ConfigParser.ConfigParser` API
    from the standard library; values are kept as a mapping of section
    name to an option/value mapping.
    """

    def __init__(self):
        # section name -> {option: value}
        self._values = {}

    def copy(self):
        """Returns a copy with independent section dicts (values shared)."""
        clone = Config()
        for section_name, section_values in self._values.items():
            clone._values[section_name] = section_values.copy()
        return clone

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Returns an iterator of (option, value) pairs for ``section``."""
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        """Returns the value for ``option`` or None when unset."""
        return self._values.get(section, {}).get(option)

    def set(self, section, option, value):
        """Sets ``option`` to ``value``, creating ``section`` on demand."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drops every option stored under ``section``."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        return [
            (safe_str(section), safe_str(option), safe_str(value))
            for section in self._values
            for option, value in self._values[section].items()]
1734 1737
1735 1738
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    _header_re = None

    def __init__(self, raw_diff):
        self.raw = raw_diff

    def chunks(self):
        """
        Splits the diff into per-file chunks (one per ``diff --git a/.. b/..``
        header). A leading newline is prepended so the split is consistent,
        and the last chunk is flagged because DiffChunk treats it specially.
        """

        diff_parts = ('\n' + self.raw).split('\ndiff --git')
        header = diff_parts[0]

        if self._meta_re:
            # NOTE(review): ``match`` is never used afterwards -- this looks
            # like dead code left from an earlier refactoring.
            match = self._meta_re.match(header)

        chunks = diff_parts[1:]
        total_chunks = len(chunks)

        # Lazily wrap each part; the flag marks the final chunk.
        return (
            DiffChunk(chunk, self, cur_chunk == total_chunks)
            for cur_chunk, chunk in enumerate(chunks, start=1))
1768 1771
1769 1772
class DiffChunk(object):
    """
    A single per-file chunk of a :class:`Diff`, split at ``diff --git``
    boundaries.
    """

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        if not last_chunk:
            chunk += '\n'

        # _header_re is supplied by the backend-specific Diff subclass.
        match = self._diff._header_re.match(chunk)
        self.header = match.groupdict()
        self.diff = chunk[match.end():]
        self.raw = chunk
1784 1787
1785 1788
class BasePathPermissionChecker(object):
    """
    Base interface for path-level permission checks on a repository.
    """

    @staticmethod
    def create_from_patterns(includes, excludes):
        """
        Builds the cheapest checker able to represent the given
        include/exclude glob patterns.
        """
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        """``True`` when every path is accessible. Subclasses must override."""
        # Fixed: was ``raise NotImplemented()`` -- NotImplemented is a
        # constant, not an exception type, so calling it raised TypeError.
        raise NotImplementedError()

    def has_access(self, path):
        """``True`` when ``path`` is accessible. Subclasses must override."""
        raise NotImplementedError()
1803 1806
1804 1807
class AllPathPermissionChecker(BasePathPermissionChecker):
    """Checker granting access to every path."""

    @property
    def has_full_access(self):
        return True

    def has_access(self, path):
        return True
1813 1816
1814 1817
class NonePathPermissionChecker(BasePathPermissionChecker):
    """Checker denying access to every path."""

    @property
    def has_full_access(self):
        return False

    def has_access(self, path):
        return False
1823 1826
1824 1827
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """
    Path checker backed by fnmatch-style include/exclude glob patterns.

    Exclude patterns win over include patterns; a path matching no include
    pattern is denied.
    """

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        # Pre-compile the glob patterns once; an empty/None pattern list
        # translates to "match nothing".
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (includes or [])]
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (excludes or [])]

    @property
    def has_full_access(self):
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        if any(regex.match(path) for regex in self.excludes_re):
            return False
        return any(regex.match(path) for regex in self.includes_re)
@@ -1,932 +1,937 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, exceptions
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 45 from rhodecode.lib.vcs.compat import configparser
46 46
47 47 hexlify = binascii.hexlify
48 48 nullid = "\0" * 20
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class MercurialRepository(BaseRepository):
54 54 """
55 55 Mercurial repository backend
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'default'
58 58
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if repository could not be found at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
           it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
          making a clone
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        self.with_wire = with_wire

        # May clone from src_url and/or create the repo on disk.
        self._init_repo(create, src_url, do_workspace_checkout)

        # caches: mapping of commit id -> index, filled lazily by commit_ids
        self._commit_ids = {}
    @LazyProperty
    def _remote(self):
        # Lazily-created proxy object used for all remote (vcsserver) calls.
        return connection.Hg(self.path, self.config, with_wire=self.with_wire)
91 91
    @LazyProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order.  Being lazy
        attribute allows external tools to inject shas from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # Keep the id -> index cache in sync with the freshly fetched list.
        self._rebuild_cache(commit_ids)
        return commit_ids
101 101
102 102 def _rebuild_cache(self, commit_ids):
103 103 self._commit_ids = dict((commit_id, index)
104 104 for index, commit_id in enumerate(commit_ids))
105 105
    @LazyProperty
    def branches(self):
        # Active (open) branches only; see _get_branches for details.
        return self._get_branches()
109 109
    @LazyProperty
    def branches_closed(self):
        # Closed branches only (active ones are excluded).
        return self._get_branches(active=False, closed=True)
113 113
    @LazyProperty
    def branches_all(self):
        # Union of open and closed branches; on a (practically impossible)
        # name clash the closed entry wins because it is applied last.
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches
120 120
121 121 def _get_branches(self, active=True, closed=False):
122 122 """
123 123 Gets branches for this repository
124 124 Returns only not closed active branches by default
125 125
126 126 :param active: return also active branches
127 127 :param closed: return also closed branches
128 128
129 129 """
130 130 if self.is_empty():
131 131 return {}
132 132
133 133 def get_name(ctx):
134 134 return ctx[0]
135 135
136 136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
137 137 self._remote.branches(active, closed).items()]
138 138
139 139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
140 140
    @LazyProperty
    def tags(self):
        """
        Gets tags for this repository
        """
        return self._get_tags()
147 147
148 148 def _get_tags(self):
149 149 if self.is_empty():
150 150 return {}
151 151
152 152 def get_name(ctx):
153 153 return ctx[0]
154 154
155 155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
156 156 self._remote.tags().items()]
157 157
158 158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
159 159
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        # 'local' defaults to False: a global (versioned) tag is created.
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(
            name, commit.raw_id, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags (the lazy property was already materialized)
        self.tags = self._get_tags()
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
192 192
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)
        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # Tagging the null revision is how Mercurial records tag removal.
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        # Refresh the already-materialized lazy property.
        self.tags = self._get_tags()
215 215
    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository
        """
        return self._get_bookmarks()
222 222
223 223 def _get_bookmarks(self):
224 224 if self.is_empty():
225 225 return {}
226 226
227 227 def get_name(ctx):
228 228 return ctx[0]
229 229
230 230 _bookmarks = [
231 231 (safe_unicode(n), hexlify(h)) for n, h in
232 232 self._remote.bookmarks().items()]
233 233
234 234 return OrderedDict(sorted(_bookmarks, key=get_name))
235 235
    def _get_all_commit_ids(self):
        # 'visible' excludes hidden/obsolete changesets (Mercurial evolve).
        return self._remote.get_all_commit_ids('visible')
238 238
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: limit the diff to this single path (optional)
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: must equal ``path`` if given; diffing two different
          paths is not supported and raises ``ValueError``.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = [self.path, path]
        else:
            file_filter = None

        # opt_git=True requests git-style diff output from Mercurial.
        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)
269 269
    def strip(self, commit_id, branch=None):
        """Strip `commit_id` (and descendants) from the repository history."""
        # backup="none": no strip bundle is kept, removal is permanent.
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # History changed; rebuild the commit id list and index cache.
        self.commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(self.commit_ids)
276 276
    def verify(self):
        """Run `hg verify` on the repository and return its output."""
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify
282 282
283 283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
284 284 if commit_id1 == commit_id2:
285 285 return commit_id1
286 286
287 287 ancestors = self._remote.revs_from_revspec(
288 288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
289 289 other_path=repo2.path)
290 290 return repo2[ancestors[0]].raw_id if ancestors else None
291 291
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the list of commits in `repo2` reachable from `commit_id2`
        but not from `commit_id1`.

        :param merge: when True, include all ancestors of `commit_id2` that
            are not ancestors of `commit_id1` (merge preview); otherwise use
            the linear range `commit_id1..commit_id2`.
        """
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits
309 309
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happened that mercurial will issue basic
        auth request that can cause whole API to hang when used from python
        or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not an local url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # Request the _remote to verify the url
        return connection.Hg.check_url(url, config.serialize())
327 327
328 328 @staticmethod
329 329 def is_valid_repository(path):
330 330 return os.path.isdir(os.path.join(path, '.hg'))
331 331
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            url = str(self._get_url(src_url))
            # Verify the remote is reachable before attempting the clone.
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)

        # Opens (or, with create=True, initializes) the local repository.
        self._remote.localrepository(create)
361 361
    @LazyProperty
    def in_memory_commit(self):
        # Staging object used to build a commit without a working copy.
        return MercurialInMemoryCommit(self)
365 365
    @LazyProperty
    def description(self):
        # Read from [web] description in the repo's hgrc; fall back to the
        # class-level default when unset.
        description = self._remote.get_config_value(
            'web', 'description', untrusted=True)
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
371 371
    @LazyProperty
    def contact(self):
        # Prefer [web] contact, then [ui] username, then the default.
        contact = (
            self._remote.get_config_value("web", "contact") or
            self._remote.get_config_value("ui", "username"))
        return safe_unicode(contact or self.DEFAULT_CONTACT)
378 378
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # Empty repository: fall back to filesystem modification time.
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
390 390
391 391 def _get_fs_mtime(self):
392 392 # fallback to filesystem
393 393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
394 394 st_path = os.path.join(self.path, '.hg', "store")
395 395 if os.path.exists(cl_path):
396 396 return os.stat(cl_path).st_mtime
397 397 else:
398 398 return os.stat(st_path).st_mtime
399 399
    def _get_url(self, url):
        """
        Returns normalized url. If schema is not given, would fall
        to filesystem
        (``file:///``) schema.
        """
        # NOTE(review): Python 2 specific - `urllib.pathname2url` and
        # str/bytes handling here do not translate directly to Python 3.
        url = url.encode('utf8')
        # 'default' is Mercurial's symbolic path name and must pass through.
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url
410 410
411 411 def get_hook_location(self):
412 412 """
413 413 returns absolute path to location where hooks are stored
414 414 """
415 415 return os.path.join(self.path, '.hg', '.hgrc')
416 416
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        Falls back to a remote lookup (branch/tag/bookmark/hash prefix)
        when the id/index is not found in the local caches; with neither
        argument given, returns the "tip" commit.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # Fast path: exact id known in the local cache.
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                id_ = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # Normalize negative indexes to their positive position.
                    commit_idx += len(self.commit_ids)
                return MercurialCommit(
                    self, id_, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        # NOTE(review): Python 2 specific `unicode` check.
        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit %s does not exist for %s" % (
                commit_id, self)
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
456 456
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        branch_ancestors = False
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # Resolve start/end ids into positions within commit_ids.
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # Make the end position inclusive for slicing below.
            end_pos += 1

        # Build a Mercurial revset expression from the given filters.
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            # Filtered queries return revision indexes, requiring the
            # index-based generator.
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
539 539
    def pull(self, url, commit_ids=None):
        """
        Pull changes from external location.

        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        self._remote.invalidate_vcs_cache()
550 550
    def fetch(self, url, commit_ids=None):
        """
        Backward compatibility with GIT fetch==pull
        """
        return self.pull(url, commit_ids=commit_ids)
556 556
    def push(self, url):
        """Push all changes to the repository at `url`."""
        url = self._get_url(url)
        self._remote.sync_push(url)
560 560
    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.
        """
        # hooks=False: shadow/workspace clones must not trigger repo hooks.
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)
567 567
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param clean: discard uncommitted changes (like `hg update -C`).
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
574 574
    def _identify(self):
        """
        Return the current state of the working directory.
        """
        # rstrip('+') drops Mercurial's dirty-working-copy marker.
        return self._remote.identify().strip().rstrip('+')
580 580
    def _heads(self, branch=None):
        """
        Return the commit ids of the repository heads.

        :param branch: restrict heads to the given branch name if set.
        """
        return self._remote.heads(branch=branch).strip().split(' ')
586 586
    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        return self._remote.ancestor(revision1, revision2)
592 592
    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param push_branches: allow to create branches in the target repo.
        :param enable_hooks: run repository hooks on the target side.
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)
604 604
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param use_rebase: rebase the source onto the target instead of
            creating a merge commit.
        """
        self._update(target_ref.commit_id, clean=True)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref.commit_id, True

        if use_rebase:
            try:
                # Temporary bookmark to track the rebased head.
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError:
                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                raise
660 660
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
682 682
683 683 def _is_the_same_branch(self, target_ref, source_ref):
684 684 return (
685 685 self._get_branch_name(target_ref) ==
686 686 self._get_branch_name(source_ref))
687 687
    def _get_branch_name(self, ref):
        """Return the branch name a reference lives on."""
        if ref.type == 'branch':
            return ref.name
        # Bookmarks/commits: ask the remote which branch the commit is on.
        return self._remote.ctx_branch(ref.commit_id)
692 692
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
        """
        Create the shadow repository for the merge workspace if it does not
        exist yet, and return its path.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
703 703
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge `source_ref` of `source_repo` into `target_ref` of this repo
        via a shadow repository, then (unless `dry_run`) push the result back.

        Returns a ``MergeResponse`` describing possibility, success, the
        merge reference and a failure reason plus metadata.
        """

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # Refuse to merge into a non-head target; it would create new heads.
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads = '\n,'.join(self._heads(target_ref.name))
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self._get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError:
                log.exception('Failure when doing local merge on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self._get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
846 851
847 852 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
848 853 config = self.config.copy()
849 854 if not enable_hooks:
850 855 config.clear_section('hooks')
851 856 return MercurialRepository(shadow_repository_path, config)
852 857
853 858 def _validate_pull_reference(self, reference):
854 859 if not (reference.name in self.bookmarks or
855 860 reference.name in self.branches or
856 861 self.get_commit(reference.commit_id)):
857 862 raise CommitDoesNotExistError(
858 863 'Unknown branch, bookmark or commit id')
859 864
860 865 def _local_pull(self, repository_path, reference):
861 866 """
862 867 Fetch a branch, bookmark or commit from a local repository.
863 868 """
864 869 repository_path = os.path.abspath(repository_path)
865 870 if repository_path == self.path:
866 871 raise ValueError('Cannot pull from the same repository')
867 872
868 873 reference_type_to_option_name = {
869 874 'book': 'bookmark',
870 875 'branch': 'branch',
871 876 }
872 877 option_name = reference_type_to_option_name.get(
873 878 reference.type, 'revision')
874 879
875 880 if option_name == 'revision':
876 881 ref = reference.commit_id
877 882 else:
878 883 ref = reference.name
879 884
880 885 options = {option_name: [ref]}
881 886 self._remote.pull_cmd(repository_path, hooks=False, **options)
882 887 self._remote.invalidate_vcs_cache()
883 888
884 889 def bookmark(self, bookmark, revision=None):
885 890 if isinstance(bookmark, unicode):
886 891 bookmark = safe_str(bookmark)
887 892 self._remote.bookmark(bookmark, revision=revision)
888 893 self._remote.invalidate_vcs_cache()
889 894
890 895 def get_path_permissions(self, username):
891 896 hgacl_file = os.path.join(self.path, '.hg/hgacl')
892 897
893 898 def read_patterns(suffix):
894 899 svalue = None
895 900 try:
896 901 svalue = hgacl.get('narrowhgacl', username + suffix)
897 902 except configparser.NoOptionError:
898 903 try:
899 904 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
900 905 except configparser.NoOptionError:
901 906 pass
902 907 if not svalue:
903 908 return None
904 909 result = ['/']
905 910 for pattern in svalue.split():
906 911 result.append(pattern)
907 912 if '*' not in pattern and '?' not in pattern:
908 913 result.append(pattern + '/*')
909 914 return result
910 915
911 916 if os.path.exists(hgacl_file):
912 917 try:
913 918 hgacl = configparser.RawConfigParser()
914 919 hgacl.read(hgacl_file)
915 920
916 921 includes = read_patterns('.includes')
917 922 excludes = read_patterns('.excludes')
918 923 return BasePathPermissionChecker.create_from_patterns(
919 924 includes, excludes)
920 925 except BaseException as e:
921 926 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
922 927 hgacl_file, self.name, e)
923 928 raise exceptions.RepositoryRequirementError(msg)
924 929 else:
925 930 return None
926 931
927 932
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """
    Commit collection generator that addresses commits by numeric index
    (Mercurial's local revision numbers) rather than by commit hash.
    """

    def _commit_factory(self, commit_id):
        """Resolve a local revision number into a commit object."""
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now