events: add serialization .to_dict() to events based on marshmallow
dan
r379:a86e0931 default
@@ -0,0 +1,69 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 from datetime import datetime
20 from marshmallow import Schema, fields
21 from pyramid.threadlocal import get_current_request
22 from rhodecode.lib.utils2 import AttributeDict
23
24
25 SYSTEM_USER = AttributeDict(dict(
26     username='__SYSTEM__'
27 ))
28
29
30 class UserSchema(Schema):
31     """
32     Marshmallow schema for a user
33     """
34     username = fields.Str()
35
36
37 class RhodecodeEventSchema(Schema):
38     """
39     Marshmallow schema for a rhodecode event
40     """
41     utc_timestamp = fields.DateTime()
42     acting_user = fields.Nested(UserSchema)
43     acting_ip = fields.Str()
44
45
46 class RhodecodeEvent(object):
47     """
48     Base event class for all Rhodecode events
49     """
50     MarshmallowSchema = RhodecodeEventSchema
51
52     def __init__(self):
53         self.request = get_current_request()
54         self.utc_timestamp = datetime.utcnow()
55
56     @property
57     def acting_user(self):
58         if self.request:
59             return self.request.user.get_instance()
60         return SYSTEM_USER
61
62     @property
63     def acting_ip(self):
64         if self.request:
65             return self.request.user.ip_addr
66         return '<no ip available>'
67
68     def as_dict(self):
69         return self.MarshmallowSchema().dump(self).data
\ No newline at end of file
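
For context, a minimal standalone sketch of how the as_dict() serialization above behaves (assuming marshmallow 2.x, where Schema.dump() returns a result object exposing .data; the Event and DummyUser names below are illustrative only and are not part of this commit):

from datetime import datetime

from marshmallow import Schema, fields


class UserSchema(Schema):
    username = fields.Str()


class EventSchema(Schema):
    utc_timestamp = fields.DateTime()
    acting_user = fields.Nested(UserSchema)
    acting_ip = fields.Str()


class Event(object):
    # Mirrors RhodecodeEvent: a class-level marshmallow schema drives serialization.
    MarshmallowSchema = EventSchema

    def __init__(self, user, ip):
        self.utc_timestamp = datetime.utcnow()
        self.acting_user = user
        self.acting_ip = ip

    def as_dict(self):
        # marshmallow 2.x: dump() returns a MarshalResult(data, errors) pair.
        return self.MarshmallowSchema().dump(self).data


class DummyUser(object):
    username = 'admin'


print(Event(DummyUser(), '127.0.0.1').as_dict())
# e.g. {'utc_timestamp': '2016-...T...',
#       'acting_user': {'username': 'admin'},
#       'acting_ip': '127.0.0.1'}
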
@@ -1,1641 +1,1654 b''
1 {
1 {
2 Babel = super.buildPythonPackage {
2 Babel = super.buildPythonPackage {
3 name = "Babel-1.3";
3 name = "Babel-1.3";
4 buildInputs = with self; [];
4 buildInputs = with self; [];
5 doCheck = false;
5 doCheck = false;
6 propagatedBuildInputs = with self; [pytz];
6 propagatedBuildInputs = with self; [pytz];
7 src = fetchurl {
7 src = fetchurl {
8 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
8 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
9 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
9 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
10 };
10 };
11 meta = {
11 meta = {
12 license = [ pkgs.lib.licenses.bsdOriginal ];
12 license = [ pkgs.lib.licenses.bsdOriginal ];
13 };
13 };
14 };
14 };
15 Beaker = super.buildPythonPackage {
15 Beaker = super.buildPythonPackage {
16 name = "Beaker-1.7.0";
16 name = "Beaker-1.7.0";
17 buildInputs = with self; [];
17 buildInputs = with self; [];
18 doCheck = false;
18 doCheck = false;
19 propagatedBuildInputs = with self; [];
19 propagatedBuildInputs = with self; [];
20 src = fetchurl {
20 src = fetchurl {
21 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
21 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
22 md5 = "386be3f7fe427358881eee4622b428b3";
22 md5 = "386be3f7fe427358881eee4622b428b3";
23 };
23 };
24 meta = {
24 meta = {
25 license = [ pkgs.lib.licenses.bsdOriginal ];
25 license = [ pkgs.lib.licenses.bsdOriginal ];
26 };
26 };
27 };
27 };
28 CProfileV = super.buildPythonPackage {
28 CProfileV = super.buildPythonPackage {
29 name = "CProfileV-1.0.6";
29 name = "CProfileV-1.0.6";
30 buildInputs = with self; [];
30 buildInputs = with self; [];
31 doCheck = false;
31 doCheck = false;
32 propagatedBuildInputs = with self; [bottle];
32 propagatedBuildInputs = with self; [bottle];
33 src = fetchurl {
33 src = fetchurl {
34 url = "https://pypi.python.org/packages/eb/df/983a0b6cfd3ac94abf023f5011cb04f33613ace196e33f53c86cf91850d5/CProfileV-1.0.6.tar.gz";
34 url = "https://pypi.python.org/packages/eb/df/983a0b6cfd3ac94abf023f5011cb04f33613ace196e33f53c86cf91850d5/CProfileV-1.0.6.tar.gz";
35 md5 = "08c7c242b6e64237bc53c5d13537e03d";
35 md5 = "08c7c242b6e64237bc53c5d13537e03d";
36 };
36 };
37 meta = {
37 meta = {
38 license = [ pkgs.lib.licenses.mit ];
38 license = [ pkgs.lib.licenses.mit ];
39 };
39 };
40 };
40 };
41 Fabric = super.buildPythonPackage {
41 Fabric = super.buildPythonPackage {
42 name = "Fabric-1.10.0";
42 name = "Fabric-1.10.0";
43 buildInputs = with self; [];
43 buildInputs = with self; [];
44 doCheck = false;
44 doCheck = false;
45 propagatedBuildInputs = with self; [paramiko];
45 propagatedBuildInputs = with self; [paramiko];
46 src = fetchurl {
46 src = fetchurl {
47 url = "https://pypi.python.org/packages/e3/5f/b6ebdb5241d5ec9eab582a5c8a01255c1107da396f849e538801d2fe64a5/Fabric-1.10.0.tar.gz";
47 url = "https://pypi.python.org/packages/e3/5f/b6ebdb5241d5ec9eab582a5c8a01255c1107da396f849e538801d2fe64a5/Fabric-1.10.0.tar.gz";
48 md5 = "2cb96473387f0e7aa035210892352f4a";
48 md5 = "2cb96473387f0e7aa035210892352f4a";
49 };
49 };
50 meta = {
50 meta = {
51 license = [ pkgs.lib.licenses.bsdOriginal ];
51 license = [ pkgs.lib.licenses.bsdOriginal ];
52 };
52 };
53 };
53 };
54 FormEncode = super.buildPythonPackage {
54 FormEncode = super.buildPythonPackage {
55 name = "FormEncode-1.2.4";
55 name = "FormEncode-1.2.4";
56 buildInputs = with self; [];
56 buildInputs = with self; [];
57 doCheck = false;
57 doCheck = false;
58 propagatedBuildInputs = with self; [];
58 propagatedBuildInputs = with self; [];
59 src = fetchurl {
59 src = fetchurl {
60 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
60 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
61 md5 = "6bc17fb9aed8aea198975e888e2077f4";
61 md5 = "6bc17fb9aed8aea198975e888e2077f4";
62 };
62 };
63 meta = {
63 meta = {
64 license = [ pkgs.lib.licenses.psfl ];
64 license = [ pkgs.lib.licenses.psfl ];
65 };
65 };
66 };
66 };
67 Jinja2 = super.buildPythonPackage {
67 Jinja2 = super.buildPythonPackage {
68 name = "Jinja2-2.7.3";
68 name = "Jinja2-2.7.3";
69 buildInputs = with self; [];
69 buildInputs = with self; [];
70 doCheck = false;
70 doCheck = false;
71 propagatedBuildInputs = with self; [MarkupSafe];
71 propagatedBuildInputs = with self; [MarkupSafe];
72 src = fetchurl {
72 src = fetchurl {
73 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
73 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
74 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
74 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
75 };
75 };
76 meta = {
76 meta = {
77 license = [ pkgs.lib.licenses.bsdOriginal ];
77 license = [ pkgs.lib.licenses.bsdOriginal ];
78 };
78 };
79 };
79 };
80 Mako = super.buildPythonPackage {
80 Mako = super.buildPythonPackage {
81 name = "Mako-1.0.1";
81 name = "Mako-1.0.1";
82 buildInputs = with self; [];
82 buildInputs = with self; [];
83 doCheck = false;
83 doCheck = false;
84 propagatedBuildInputs = with self; [MarkupSafe];
84 propagatedBuildInputs = with self; [MarkupSafe];
85 src = fetchurl {
85 src = fetchurl {
86 url = "https://pypi.python.org/packages/8e/a4/aa56533ecaa5f22ca92428f74e074d0c9337282933c722391902c8f9e0f8/Mako-1.0.1.tar.gz";
86 url = "https://pypi.python.org/packages/8e/a4/aa56533ecaa5f22ca92428f74e074d0c9337282933c722391902c8f9e0f8/Mako-1.0.1.tar.gz";
87 md5 = "9f0aafd177b039ef67b90ea350497a54";
87 md5 = "9f0aafd177b039ef67b90ea350497a54";
88 };
88 };
89 meta = {
89 meta = {
90 license = [ pkgs.lib.licenses.mit ];
90 license = [ pkgs.lib.licenses.mit ];
91 };
91 };
92 };
92 };
93 Markdown = super.buildPythonPackage {
93 Markdown = super.buildPythonPackage {
94 name = "Markdown-2.6.2";
94 name = "Markdown-2.6.2";
95 buildInputs = with self; [];
95 buildInputs = with self; [];
96 doCheck = false;
96 doCheck = false;
97 propagatedBuildInputs = with self; [];
97 propagatedBuildInputs = with self; [];
98 src = fetchurl {
98 src = fetchurl {
99 url = "https://pypi.python.org/packages/62/8b/83658b5f6c220d5fcde9f9852d46ea54765d734cfbc5a9f4c05bfc36db4d/Markdown-2.6.2.tar.gz";
99 url = "https://pypi.python.org/packages/62/8b/83658b5f6c220d5fcde9f9852d46ea54765d734cfbc5a9f4c05bfc36db4d/Markdown-2.6.2.tar.gz";
100 md5 = "256d19afcc564dc4ce4c229bb762f7ae";
100 md5 = "256d19afcc564dc4ce4c229bb762f7ae";
101 };
101 };
102 meta = {
102 meta = {
103 license = [ pkgs.lib.licenses.bsdOriginal ];
103 license = [ pkgs.lib.licenses.bsdOriginal ];
104 };
104 };
105 };
105 };
106 MarkupSafe = super.buildPythonPackage {
106 MarkupSafe = super.buildPythonPackage {
107 name = "MarkupSafe-0.23";
107 name = "MarkupSafe-0.23";
108 buildInputs = with self; [];
108 buildInputs = with self; [];
109 doCheck = false;
109 doCheck = false;
110 propagatedBuildInputs = with self; [];
110 propagatedBuildInputs = with self; [];
111 src = fetchurl {
111 src = fetchurl {
112 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
112 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
113 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
113 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
114 };
114 };
115 meta = {
115 meta = {
116 license = [ pkgs.lib.licenses.bsdOriginal ];
116 license = [ pkgs.lib.licenses.bsdOriginal ];
117 };
117 };
118 };
118 };
119 MySQL-python = super.buildPythonPackage {
119 MySQL-python = super.buildPythonPackage {
120 name = "MySQL-python-1.2.5";
120 name = "MySQL-python-1.2.5";
121 buildInputs = with self; [];
121 buildInputs = with self; [];
122 doCheck = false;
122 doCheck = false;
123 propagatedBuildInputs = with self; [];
123 propagatedBuildInputs = with self; [];
124 src = fetchurl {
124 src = fetchurl {
125 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
125 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
126 md5 = "654f75b302db6ed8dc5a898c625e030c";
126 md5 = "654f75b302db6ed8dc5a898c625e030c";
127 };
127 };
128 meta = {
128 meta = {
129 license = [ pkgs.lib.licenses.gpl1 ];
129 license = [ pkgs.lib.licenses.gpl1 ];
130 };
130 };
131 };
131 };
132 Paste = super.buildPythonPackage {
132 Paste = super.buildPythonPackage {
133 name = "Paste-2.0.2";
133 name = "Paste-2.0.2";
134 buildInputs = with self; [];
134 buildInputs = with self; [];
135 doCheck = false;
135 doCheck = false;
136 propagatedBuildInputs = with self; [six];
136 propagatedBuildInputs = with self; [six];
137 src = fetchurl {
137 src = fetchurl {
138 url = "https://pypi.python.org/packages/d5/8d/0f8ac40687b97ff3e07ebd1369be20bdb3f93864d2dc3c2ff542edb4ce50/Paste-2.0.2.tar.gz";
138 url = "https://pypi.python.org/packages/d5/8d/0f8ac40687b97ff3e07ebd1369be20bdb3f93864d2dc3c2ff542edb4ce50/Paste-2.0.2.tar.gz";
139 md5 = "4bfc8a7eaf858f6309d2ac0f40fc951c";
139 md5 = "4bfc8a7eaf858f6309d2ac0f40fc951c";
140 };
140 };
141 meta = {
141 meta = {
142 license = [ pkgs.lib.licenses.mit ];
142 license = [ pkgs.lib.licenses.mit ];
143 };
143 };
144 };
144 };
145 PasteDeploy = super.buildPythonPackage {
145 PasteDeploy = super.buildPythonPackage {
146 name = "PasteDeploy-1.5.2";
146 name = "PasteDeploy-1.5.2";
147 buildInputs = with self; [];
147 buildInputs = with self; [];
148 doCheck = false;
148 doCheck = false;
149 propagatedBuildInputs = with self; [];
149 propagatedBuildInputs = with self; [];
150 src = fetchurl {
150 src = fetchurl {
151 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
151 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
152 md5 = "352b7205c78c8de4987578d19431af3b";
152 md5 = "352b7205c78c8de4987578d19431af3b";
153 };
153 };
154 meta = {
154 meta = {
155 license = [ pkgs.lib.licenses.mit ];
155 license = [ pkgs.lib.licenses.mit ];
156 };
156 };
157 };
157 };
158 PasteScript = super.buildPythonPackage {
158 PasteScript = super.buildPythonPackage {
159 name = "PasteScript-1.7.5";
159 name = "PasteScript-1.7.5";
160 buildInputs = with self; [];
160 buildInputs = with self; [];
161 doCheck = false;
161 doCheck = false;
162 propagatedBuildInputs = with self; [Paste PasteDeploy];
162 propagatedBuildInputs = with self; [Paste PasteDeploy];
163 src = fetchurl {
163 src = fetchurl {
164 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
164 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
165 md5 = "4c72d78dcb6bb993f30536842c16af4d";
165 md5 = "4c72d78dcb6bb993f30536842c16af4d";
166 };
166 };
167 meta = {
167 meta = {
168 license = [ pkgs.lib.licenses.mit ];
168 license = [ pkgs.lib.licenses.mit ];
169 };
169 };
170 };
170 };
171 Pygments = super.buildPythonPackage {
171 Pygments = super.buildPythonPackage {
172 name = "Pygments-2.1.3";
172 name = "Pygments-2.1.3";
173 buildInputs = with self; [];
173 buildInputs = with self; [];
174 doCheck = false;
174 doCheck = false;
175 propagatedBuildInputs = with self; [];
175 propagatedBuildInputs = with self; [];
176 src = fetchurl {
176 src = fetchurl {
177 url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
177 url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
178 md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
178 md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
179 };
179 };
180 meta = {
180 meta = {
181 license = [ pkgs.lib.licenses.bsdOriginal ];
181 license = [ pkgs.lib.licenses.bsdOriginal ];
182 };
182 };
183 };
183 };
184 Pylons = super.buildPythonPackage {
184 Pylons = super.buildPythonPackage {
185 name = "Pylons-1.0.1";
185 name = "Pylons-1.0.1";
186 buildInputs = with self; [];
186 buildInputs = with self; [];
187 doCheck = false;
187 doCheck = false;
188 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
188 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
189 src = fetchurl {
189 src = fetchurl {
190 url = "https://pypi.python.org/packages/a2/69/b835a6bad00acbfeed3f33c6e44fa3f936efc998c795bfb15c61a79ecf62/Pylons-1.0.1.tar.gz";
190 url = "https://pypi.python.org/packages/a2/69/b835a6bad00acbfeed3f33c6e44fa3f936efc998c795bfb15c61a79ecf62/Pylons-1.0.1.tar.gz";
191 md5 = "6cb880d75fa81213192142b07a6e4915";
191 md5 = "6cb880d75fa81213192142b07a6e4915";
192 };
192 };
193 meta = {
193 meta = {
194 license = [ pkgs.lib.licenses.bsdOriginal ];
194 license = [ pkgs.lib.licenses.bsdOriginal ];
195 };
195 };
196 };
196 };
197 Pyro4 = super.buildPythonPackage {
197 Pyro4 = super.buildPythonPackage {
198 name = "Pyro4-4.41";
198 name = "Pyro4-4.41";
199 buildInputs = with self; [];
199 buildInputs = with self; [];
200 doCheck = false;
200 doCheck = false;
201 propagatedBuildInputs = with self; [serpent];
201 propagatedBuildInputs = with self; [serpent];
202 src = fetchurl {
202 src = fetchurl {
203 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
203 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
204 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
204 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
205 };
205 };
206 meta = {
206 meta = {
207 license = [ pkgs.lib.licenses.mit ];
207 license = [ pkgs.lib.licenses.mit ];
208 };
208 };
209 };
209 };
210 Routes = super.buildPythonPackage {
210 Routes = super.buildPythonPackage {
211 name = "Routes-1.13";
211 name = "Routes-1.13";
212 buildInputs = with self; [];
212 buildInputs = with self; [];
213 doCheck = false;
213 doCheck = false;
214 propagatedBuildInputs = with self; [repoze.lru];
214 propagatedBuildInputs = with self; [repoze.lru];
215 src = fetchurl {
215 src = fetchurl {
216 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
216 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
217 md5 = "d527b0ab7dd9172b1275a41f97448783";
217 md5 = "d527b0ab7dd9172b1275a41f97448783";
218 };
218 };
219 meta = {
219 meta = {
220 license = [ pkgs.lib.licenses.bsdOriginal ];
220 license = [ pkgs.lib.licenses.bsdOriginal ];
221 };
221 };
222 };
222 };
223 SQLAlchemy = super.buildPythonPackage {
223 SQLAlchemy = super.buildPythonPackage {
224 name = "SQLAlchemy-0.9.9";
224 name = "SQLAlchemy-0.9.9";
225 buildInputs = with self; [];
225 buildInputs = with self; [];
226 doCheck = false;
226 doCheck = false;
227 propagatedBuildInputs = with self; [];
227 propagatedBuildInputs = with self; [];
228 src = fetchurl {
228 src = fetchurl {
229 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
229 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
230 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
230 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
231 };
231 };
232 meta = {
232 meta = {
233 license = [ pkgs.lib.licenses.mit ];
233 license = [ pkgs.lib.licenses.mit ];
234 };
234 };
235 };
235 };
236 Sphinx = super.buildPythonPackage {
236 Sphinx = super.buildPythonPackage {
237 name = "Sphinx-1.2.2";
237 name = "Sphinx-1.2.2";
238 buildInputs = with self; [];
238 buildInputs = with self; [];
239 doCheck = false;
239 doCheck = false;
240 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
240 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
241 src = fetchurl {
241 src = fetchurl {
242 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
242 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
243 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
243 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
244 };
244 };
245 meta = {
245 meta = {
246 license = [ pkgs.lib.licenses.bsdOriginal ];
246 license = [ pkgs.lib.licenses.bsdOriginal ];
247 };
247 };
248 };
248 };
249 Tempita = super.buildPythonPackage {
249 Tempita = super.buildPythonPackage {
250 name = "Tempita-0.5.2";
250 name = "Tempita-0.5.2";
251 buildInputs = with self; [];
251 buildInputs = with self; [];
252 doCheck = false;
252 doCheck = false;
253 propagatedBuildInputs = with self; [];
253 propagatedBuildInputs = with self; [];
254 src = fetchurl {
254 src = fetchurl {
255 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
255 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
256 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
256 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
257 };
257 };
258 meta = {
258 meta = {
259 license = [ pkgs.lib.licenses.mit ];
259 license = [ pkgs.lib.licenses.mit ];
260 };
260 };
261 };
261 };
262 URLObject = super.buildPythonPackage {
262 URLObject = super.buildPythonPackage {
263 name = "URLObject-2.4.0";
263 name = "URLObject-2.4.0";
264 buildInputs = with self; [];
264 buildInputs = with self; [];
265 doCheck = false;
265 doCheck = false;
266 propagatedBuildInputs = with self; [];
266 propagatedBuildInputs = with self; [];
267 src = fetchurl {
267 src = fetchurl {
268 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
268 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
269 md5 = "2ed819738a9f0a3051f31dc9924e3065";
269 md5 = "2ed819738a9f0a3051f31dc9924e3065";
270 };
270 };
271 meta = {
271 meta = {
272 license = [ ];
272 license = [ ];
273 };
273 };
274 };
274 };
275 WebError = super.buildPythonPackage {
275 WebError = super.buildPythonPackage {
276 name = "WebError-0.10.3";
276 name = "WebError-0.10.3";
277 buildInputs = with self; [];
277 buildInputs = with self; [];
278 doCheck = false;
278 doCheck = false;
279 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
279 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
280 src = fetchurl {
280 src = fetchurl {
281 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
281 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
282 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
282 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
283 };
283 };
284 meta = {
284 meta = {
285 license = [ pkgs.lib.licenses.mit ];
285 license = [ pkgs.lib.licenses.mit ];
286 };
286 };
287 };
287 };
288 WebHelpers = super.buildPythonPackage {
288 WebHelpers = super.buildPythonPackage {
289 name = "WebHelpers-1.3";
289 name = "WebHelpers-1.3";
290 buildInputs = with self; [];
290 buildInputs = with self; [];
291 doCheck = false;
291 doCheck = false;
292 propagatedBuildInputs = with self; [MarkupSafe];
292 propagatedBuildInputs = with self; [MarkupSafe];
293 src = fetchurl {
293 src = fetchurl {
294 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
294 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
295 md5 = "32749ffadfc40fea51075a7def32588b";
295 md5 = "32749ffadfc40fea51075a7def32588b";
296 };
296 };
297 meta = {
297 meta = {
298 license = [ pkgs.lib.licenses.bsdOriginal ];
298 license = [ pkgs.lib.licenses.bsdOriginal ];
299 };
299 };
300 };
300 };
301 WebHelpers2 = super.buildPythonPackage {
301 WebHelpers2 = super.buildPythonPackage {
302 name = "WebHelpers2-2.0";
302 name = "WebHelpers2-2.0";
303 buildInputs = with self; [];
303 buildInputs = with self; [];
304 doCheck = false;
304 doCheck = false;
305 propagatedBuildInputs = with self; [MarkupSafe six];
305 propagatedBuildInputs = with self; [MarkupSafe six];
306 src = fetchurl {
306 src = fetchurl {
307 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
307 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
308 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
308 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
309 };
309 };
310 meta = {
310 meta = {
311 license = [ pkgs.lib.licenses.mit ];
311 license = [ pkgs.lib.licenses.mit ];
312 };
312 };
313 };
313 };
314 WebOb = super.buildPythonPackage {
314 WebOb = super.buildPythonPackage {
315 name = "WebOb-1.3.1";
315 name = "WebOb-1.3.1";
316 buildInputs = with self; [];
316 buildInputs = with self; [];
317 doCheck = false;
317 doCheck = false;
318 propagatedBuildInputs = with self; [];
318 propagatedBuildInputs = with self; [];
319 src = fetchurl {
319 src = fetchurl {
320 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
320 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
321 md5 = "20918251c5726956ba8fef22d1556177";
321 md5 = "20918251c5726956ba8fef22d1556177";
322 };
322 };
323 meta = {
323 meta = {
324 license = [ pkgs.lib.licenses.mit ];
324 license = [ pkgs.lib.licenses.mit ];
325 };
325 };
326 };
326 };
327 WebTest = super.buildPythonPackage {
327 WebTest = super.buildPythonPackage {
328 name = "WebTest-1.4.3";
328 name = "WebTest-1.4.3";
329 buildInputs = with self; [];
329 buildInputs = with self; [];
330 doCheck = false;
330 doCheck = false;
331 propagatedBuildInputs = with self; [WebOb];
331 propagatedBuildInputs = with self; [WebOb];
332 src = fetchurl {
332 src = fetchurl {
333 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
333 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
334 md5 = "631ce728bed92c681a4020a36adbc353";
334 md5 = "631ce728bed92c681a4020a36adbc353";
335 };
335 };
336 meta = {
336 meta = {
337 license = [ pkgs.lib.licenses.mit ];
337 license = [ pkgs.lib.licenses.mit ];
338 };
338 };
339 };
339 };
340 Whoosh = super.buildPythonPackage {
340 Whoosh = super.buildPythonPackage {
341 name = "Whoosh-2.7.0";
341 name = "Whoosh-2.7.0";
342 buildInputs = with self; [];
342 buildInputs = with self; [];
343 doCheck = false;
343 doCheck = false;
344 propagatedBuildInputs = with self; [];
344 propagatedBuildInputs = with self; [];
345 src = fetchurl {
345 src = fetchurl {
346 url = "https://pypi.python.org/packages/1c/dc/2f0231ff3875ded36df8c1ab851451e51a237dc0e5a86d3d96036158da94/Whoosh-2.7.0.zip";
346 url = "https://pypi.python.org/packages/1c/dc/2f0231ff3875ded36df8c1ab851451e51a237dc0e5a86d3d96036158da94/Whoosh-2.7.0.zip";
347 md5 = "7abfd970f16fadc7311960f3fa0bc7a9";
347 md5 = "7abfd970f16fadc7311960f3fa0bc7a9";
348 };
348 };
349 meta = {
349 meta = {
350 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
350 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
351 };
351 };
352 };
352 };
353 alembic = super.buildPythonPackage {
353 alembic = super.buildPythonPackage {
354 name = "alembic-0.8.4";
354 name = "alembic-0.8.4";
355 buildInputs = with self; [];
355 buildInputs = with self; [];
356 doCheck = false;
356 doCheck = false;
357 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor];
357 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor];
358 src = fetchurl {
358 src = fetchurl {
359 url = "https://pypi.python.org/packages/ca/7e/299b4499b5c75e5a38c5845145ad24755bebfb8eec07a2e1c366b7181eeb/alembic-0.8.4.tar.gz";
359 url = "https://pypi.python.org/packages/ca/7e/299b4499b5c75e5a38c5845145ad24755bebfb8eec07a2e1c366b7181eeb/alembic-0.8.4.tar.gz";
360 md5 = "5f95d8ee62b443f9b37eb5bee76c582d";
360 md5 = "5f95d8ee62b443f9b37eb5bee76c582d";
361 };
361 };
362 meta = {
362 meta = {
363 license = [ pkgs.lib.licenses.mit ];
363 license = [ pkgs.lib.licenses.mit ];
364 };
364 };
365 };
365 };
366 amqplib = super.buildPythonPackage {
366 amqplib = super.buildPythonPackage {
367 name = "amqplib-1.0.2";
367 name = "amqplib-1.0.2";
368 buildInputs = with self; [];
368 buildInputs = with self; [];
369 doCheck = false;
369 doCheck = false;
370 propagatedBuildInputs = with self; [];
370 propagatedBuildInputs = with self; [];
371 src = fetchurl {
371 src = fetchurl {
372 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
372 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
373 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
373 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
374 };
374 };
375 meta = {
375 meta = {
376 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
376 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
377 };
377 };
378 };
378 };
379 anyjson = super.buildPythonPackage {
379 anyjson = super.buildPythonPackage {
380 name = "anyjson-0.3.3";
380 name = "anyjson-0.3.3";
381 buildInputs = with self; [];
381 buildInputs = with self; [];
382 doCheck = false;
382 doCheck = false;
383 propagatedBuildInputs = with self; [];
383 propagatedBuildInputs = with self; [];
384 src = fetchurl {
384 src = fetchurl {
385 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
385 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
386 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
386 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
387 };
387 };
388 meta = {
388 meta = {
389 license = [ pkgs.lib.licenses.bsdOriginal ];
389 license = [ pkgs.lib.licenses.bsdOriginal ];
390 };
390 };
391 };
391 };
392 appenlight-client = super.buildPythonPackage {
392 appenlight-client = super.buildPythonPackage {
393 name = "appenlight-client-0.6.14";
393 name = "appenlight-client-0.6.14";
394 buildInputs = with self; [];
394 buildInputs = with self; [];
395 doCheck = false;
395 doCheck = false;
396 propagatedBuildInputs = with self; [WebOb requests];
396 propagatedBuildInputs = with self; [WebOb requests];
397 src = fetchurl {
397 src = fetchurl {
398 url = "https://pypi.python.org/packages/4d/e0/23fee3ebada8143f707e65c06bcb82992040ee64ea8355e044ed55ebf0c1/appenlight_client-0.6.14.tar.gz";
398 url = "https://pypi.python.org/packages/4d/e0/23fee3ebada8143f707e65c06bcb82992040ee64ea8355e044ed55ebf0c1/appenlight_client-0.6.14.tar.gz";
399 md5 = "578c69b09f4356d898fff1199b98a95c";
399 md5 = "578c69b09f4356d898fff1199b98a95c";
400 };
400 };
401 meta = {
401 meta = {
402 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
402 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
403 };
403 };
404 };
404 };
405 authomatic = super.buildPythonPackage {
405 authomatic = super.buildPythonPackage {
406 name = "authomatic-0.1.0.post1";
406 name = "authomatic-0.1.0.post1";
407 buildInputs = with self; [];
407 buildInputs = with self; [];
408 doCheck = false;
408 doCheck = false;
409 propagatedBuildInputs = with self; [];
409 propagatedBuildInputs = with self; [];
410 src = fetchurl {
410 src = fetchurl {
411 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
411 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
412 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
412 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
413 };
413 };
414 meta = {
414 meta = {
415 license = [ pkgs.lib.licenses.mit ];
415 license = [ pkgs.lib.licenses.mit ];
416 };
416 };
417 };
417 };
418 backport-ipaddress = super.buildPythonPackage {
418 backport-ipaddress = super.buildPythonPackage {
419 name = "backport-ipaddress-0.1";
419 name = "backport-ipaddress-0.1";
420 buildInputs = with self; [];
420 buildInputs = with self; [];
421 doCheck = false;
421 doCheck = false;
422 propagatedBuildInputs = with self; [];
422 propagatedBuildInputs = with self; [];
423 src = fetchurl {
423 src = fetchurl {
424 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
424 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
425 md5 = "9c1f45f4361f71b124d7293a60006c05";
425 md5 = "9c1f45f4361f71b124d7293a60006c05";
426 };
426 };
427 meta = {
427 meta = {
428 license = [ pkgs.lib.licenses.psfl ];
428 license = [ pkgs.lib.licenses.psfl ];
429 };
429 };
430 };
430 };
431 bottle = super.buildPythonPackage {
431 bottle = super.buildPythonPackage {
432 name = "bottle-0.12.8";
432 name = "bottle-0.12.8";
433 buildInputs = with self; [];
433 buildInputs = with self; [];
434 doCheck = false;
434 doCheck = false;
435 propagatedBuildInputs = with self; [];
435 propagatedBuildInputs = with self; [];
436 src = fetchurl {
436 src = fetchurl {
437 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
437 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
438 md5 = "13132c0a8f607bf860810a6ee9064c5b";
438 md5 = "13132c0a8f607bf860810a6ee9064c5b";
439 };
439 };
440 meta = {
440 meta = {
441 license = [ pkgs.lib.licenses.mit ];
441 license = [ pkgs.lib.licenses.mit ];
442 };
442 };
443 };
443 };
444 bumpversion = super.buildPythonPackage {
444 bumpversion = super.buildPythonPackage {
445 name = "bumpversion-0.5.3";
445 name = "bumpversion-0.5.3";
446 buildInputs = with self; [];
446 buildInputs = with self; [];
447 doCheck = false;
447 doCheck = false;
448 propagatedBuildInputs = with self; [];
448 propagatedBuildInputs = with self; [];
449 src = fetchurl {
449 src = fetchurl {
450 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
450 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
451 md5 = "c66a3492eafcf5ad4b024be9fca29820";
451 md5 = "c66a3492eafcf5ad4b024be9fca29820";
452 };
452 };
453 meta = {
453 meta = {
454 license = [ pkgs.lib.licenses.mit ];
454 license = [ pkgs.lib.licenses.mit ];
455 };
455 };
456 };
456 };
457 celery = super.buildPythonPackage {
457 celery = super.buildPythonPackage {
458 name = "celery-2.2.10";
458 name = "celery-2.2.10";
459 buildInputs = with self; [];
459 buildInputs = with self; [];
460 doCheck = false;
460 doCheck = false;
461 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
461 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
462 src = fetchurl {
462 src = fetchurl {
463 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
463 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
464 md5 = "898bc87e54f278055b561316ba73e222";
464 md5 = "898bc87e54f278055b561316ba73e222";
465 };
465 };
466 meta = {
466 meta = {
467 license = [ pkgs.lib.licenses.bsdOriginal ];
467 license = [ pkgs.lib.licenses.bsdOriginal ];
468 };
468 };
469 };
469 };
470 click = super.buildPythonPackage {
470 click = super.buildPythonPackage {
471 name = "click-5.1";
471 name = "click-5.1";
472 buildInputs = with self; [];
472 buildInputs = with self; [];
473 doCheck = false;
473 doCheck = false;
474 propagatedBuildInputs = with self; [];
474 propagatedBuildInputs = with self; [];
475 src = fetchurl {
475 src = fetchurl {
476 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
476 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
477 md5 = "9c5323008cccfe232a8b161fc8196d41";
477 md5 = "9c5323008cccfe232a8b161fc8196d41";
478 };
478 };
479 meta = {
479 meta = {
480 license = [ pkgs.lib.licenses.bsdOriginal ];
480 license = [ pkgs.lib.licenses.bsdOriginal ];
481 };
481 };
482 };
482 };
483 colander = super.buildPythonPackage {
483 colander = super.buildPythonPackage {
484 name = "colander-1.2";
484 name = "colander-1.2";
485 buildInputs = with self; [];
485 buildInputs = with self; [];
486 doCheck = false;
486 doCheck = false;
487 propagatedBuildInputs = with self; [translationstring iso8601];
487 propagatedBuildInputs = with self; [translationstring iso8601];
488 src = fetchurl {
488 src = fetchurl {
489 url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz";
489 url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz";
490 md5 = "83db21b07936a0726e588dae1914b9ed";
490 md5 = "83db21b07936a0726e588dae1914b9ed";
491 };
491 };
492 meta = {
492 meta = {
493 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
493 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
494 };
494 };
495 };
495 };
496 configobj = super.buildPythonPackage {
496 configobj = super.buildPythonPackage {
497 name = "configobj-5.0.6";
497 name = "configobj-5.0.6";
498 buildInputs = with self; [];
498 buildInputs = with self; [];
499 doCheck = false;
499 doCheck = false;
500 propagatedBuildInputs = with self; [six];
500 propagatedBuildInputs = with self; [six];
501 src = fetchurl {
501 src = fetchurl {
502 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
502 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
503 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
503 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
504 };
504 };
505 meta = {
505 meta = {
506 license = [ pkgs.lib.licenses.bsdOriginal ];
506 license = [ pkgs.lib.licenses.bsdOriginal ];
507 };
507 };
508 };
508 };
509 cov-core = super.buildPythonPackage {
509 cov-core = super.buildPythonPackage {
510 name = "cov-core-1.15.0";
510 name = "cov-core-1.15.0";
511 buildInputs = with self; [];
511 buildInputs = with self; [];
512 doCheck = false;
512 doCheck = false;
513 propagatedBuildInputs = with self; [coverage];
513 propagatedBuildInputs = with self; [coverage];
514 src = fetchurl {
514 src = fetchurl {
515 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
515 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
516 md5 = "f519d4cb4c4e52856afb14af52919fe6";
516 md5 = "f519d4cb4c4e52856afb14af52919fe6";
517 };
517 };
518 meta = {
518 meta = {
519 license = [ pkgs.lib.licenses.mit ];
519 license = [ pkgs.lib.licenses.mit ];
520 };
520 };
521 };
521 };
522 coverage = super.buildPythonPackage {
522 coverage = super.buildPythonPackage {
523 name = "coverage-3.7.1";
523 name = "coverage-3.7.1";
524 buildInputs = with self; [];
524 buildInputs = with self; [];
525 doCheck = false;
525 doCheck = false;
526 propagatedBuildInputs = with self; [];
526 propagatedBuildInputs = with self; [];
527 src = fetchurl {
527 src = fetchurl {
528 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
528 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
529 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
529 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
530 };
530 };
531 meta = {
531 meta = {
532 license = [ pkgs.lib.licenses.bsdOriginal ];
532 license = [ pkgs.lib.licenses.bsdOriginal ];
533 };
533 };
534 };
534 };
535 cssselect = super.buildPythonPackage {
535 cssselect = super.buildPythonPackage {
536 name = "cssselect-0.9.1";
536 name = "cssselect-0.9.1";
537 buildInputs = with self; [];
537 buildInputs = with self; [];
538 doCheck = false;
538 doCheck = false;
539 propagatedBuildInputs = with self; [];
539 propagatedBuildInputs = with self; [];
540 src = fetchurl {
540 src = fetchurl {
541 url = "https://pypi.python.org/packages/aa/e5/9ee1460d485b94a6d55732eb7ad5b6c084caf73dd6f9cb0bb7d2a78fafe8/cssselect-0.9.1.tar.gz";
541 url = "https://pypi.python.org/packages/aa/e5/9ee1460d485b94a6d55732eb7ad5b6c084caf73dd6f9cb0bb7d2a78fafe8/cssselect-0.9.1.tar.gz";
542 md5 = "c74f45966277dc7a0f768b9b0f3522ac";
542 md5 = "c74f45966277dc7a0f768b9b0f3522ac";
543 };
543 };
544 meta = {
544 meta = {
545 license = [ pkgs.lib.licenses.bsdOriginal ];
545 license = [ pkgs.lib.licenses.bsdOriginal ];
546 };
546 };
547 };
547 };
548 decorator = super.buildPythonPackage {
548 decorator = super.buildPythonPackage {
549 name = "decorator-3.4.2";
549 name = "decorator-3.4.2";
550 buildInputs = with self; [];
550 buildInputs = with self; [];
551 doCheck = false;
551 doCheck = false;
552 propagatedBuildInputs = with self; [];
552 propagatedBuildInputs = with self; [];
553 src = fetchurl {
553 src = fetchurl {
554 url = "https://pypi.python.org/packages/35/3a/42566eb7a2cbac774399871af04e11d7ae3fc2579e7dae85213b8d1d1c57/decorator-3.4.2.tar.gz";
554 url = "https://pypi.python.org/packages/35/3a/42566eb7a2cbac774399871af04e11d7ae3fc2579e7dae85213b8d1d1c57/decorator-3.4.2.tar.gz";
555 md5 = "9e0536870d2b83ae27d58dbf22582f4d";
555 md5 = "9e0536870d2b83ae27d58dbf22582f4d";
556 };
556 };
557 meta = {
557 meta = {
558 license = [ pkgs.lib.licenses.bsdOriginal ];
558 license = [ pkgs.lib.licenses.bsdOriginal ];
559 };
559 };
560 };
560 };
561 docutils = super.buildPythonPackage {
561 docutils = super.buildPythonPackage {
562 name = "docutils-0.12";
562 name = "docutils-0.12";
563 buildInputs = with self; [];
563 buildInputs = with self; [];
564 doCheck = false;
564 doCheck = false;
565 propagatedBuildInputs = with self; [];
565 propagatedBuildInputs = with self; [];
566 src = fetchurl {
566 src = fetchurl {
567 url = "https://pypi.python.org/packages/37/38/ceda70135b9144d84884ae2fc5886c6baac4edea39550f28bcd144c1234d/docutils-0.12.tar.gz";
567 url = "https://pypi.python.org/packages/37/38/ceda70135b9144d84884ae2fc5886c6baac4edea39550f28bcd144c1234d/docutils-0.12.tar.gz";
568 md5 = "4622263b62c5c771c03502afa3157768";
568 md5 = "4622263b62c5c771c03502afa3157768";
569 };
569 };
570 meta = {
570 meta = {
571 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
571 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
572 };
572 };
573 };
573 };
574 dogpile.cache = super.buildPythonPackage {
574 dogpile.cache = super.buildPythonPackage {
575 name = "dogpile.cache-0.6.1";
575 name = "dogpile.cache-0.6.1";
576 buildInputs = with self; [];
576 buildInputs = with self; [];
577 doCheck = false;
577 doCheck = false;
578 propagatedBuildInputs = with self; [dogpile.core];
578 propagatedBuildInputs = with self; [dogpile.core];
579 src = fetchurl {
579 src = fetchurl {
580 url = "https://pypi.python.org/packages/f6/a0/6f2142c58c6588d17c734265b103ae1cd0741e1681dd9483a63f22033375/dogpile.cache-0.6.1.tar.gz";
580 url = "https://pypi.python.org/packages/f6/a0/6f2142c58c6588d17c734265b103ae1cd0741e1681dd9483a63f22033375/dogpile.cache-0.6.1.tar.gz";
581 md5 = "35d7fb30f22bbd0685763d894dd079a9";
581 md5 = "35d7fb30f22bbd0685763d894dd079a9";
582 };
582 };
583 meta = {
583 meta = {
584 license = [ pkgs.lib.licenses.bsdOriginal ];
584 license = [ pkgs.lib.licenses.bsdOriginal ];
585 };
585 };
586 };
586 };
587 dogpile.core = super.buildPythonPackage {
587 dogpile.core = super.buildPythonPackage {
588 name = "dogpile.core-0.4.1";
588 name = "dogpile.core-0.4.1";
589 buildInputs = with self; [];
589 buildInputs = with self; [];
590 doCheck = false;
590 doCheck = false;
591 propagatedBuildInputs = with self; [];
591 propagatedBuildInputs = with self; [];
592 src = fetchurl {
592 src = fetchurl {
593 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
593 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
594 md5 = "01cb19f52bba3e95c9b560f39341f045";
594 md5 = "01cb19f52bba3e95c9b560f39341f045";
595 };
595 };
596 meta = {
596 meta = {
597 license = [ pkgs.lib.licenses.bsdOriginal ];
597 license = [ pkgs.lib.licenses.bsdOriginal ];
598 };
598 };
599 };
599 };
600 dulwich = super.buildPythonPackage {
600 dulwich = super.buildPythonPackage {
601 name = "dulwich-0.12.0";
601 name = "dulwich-0.12.0";
602 buildInputs = with self; [];
602 buildInputs = with self; [];
603 doCheck = false;
603 doCheck = false;
604 propagatedBuildInputs = with self; [];
604 propagatedBuildInputs = with self; [];
605 src = fetchurl {
605 src = fetchurl {
606 url = "https://pypi.python.org/packages/6f/04/fbe561b6d45c0ec758330d5b7f5ba4b6cb4f1ca1ab49859d2fc16320da75/dulwich-0.12.0.tar.gz";
606 url = "https://pypi.python.org/packages/6f/04/fbe561b6d45c0ec758330d5b7f5ba4b6cb4f1ca1ab49859d2fc16320da75/dulwich-0.12.0.tar.gz";
607 md5 = "f3a8a12bd9f9dd8c233e18f3d49436fa";
607 md5 = "f3a8a12bd9f9dd8c233e18f3d49436fa";
608 };
608 };
609 meta = {
609 meta = {
610 license = [ pkgs.lib.licenses.gpl2Plus ];
610 license = [ pkgs.lib.licenses.gpl2Plus ];
611 };
611 };
612 };
612 };
613 ecdsa = super.buildPythonPackage {
613 ecdsa = super.buildPythonPackage {
614 name = "ecdsa-0.11";
614 name = "ecdsa-0.11";
615 buildInputs = with self; [];
615 buildInputs = with self; [];
616 doCheck = false;
616 doCheck = false;
617 propagatedBuildInputs = with self; [];
617 propagatedBuildInputs = with self; [];
618 src = fetchurl {
618 src = fetchurl {
619 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
619 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
620 md5 = "8ef586fe4dbb156697d756900cb41d7c";
620 md5 = "8ef586fe4dbb156697d756900cb41d7c";
621 };
621 };
622 meta = {
622 meta = {
623 license = [ pkgs.lib.licenses.mit ];
623 license = [ pkgs.lib.licenses.mit ];
624 };
624 };
625 };
625 };
626 elasticsearch = super.buildPythonPackage {
626 elasticsearch = super.buildPythonPackage {
627 name = "elasticsearch-2.3.0";
627 name = "elasticsearch-2.3.0";
628 buildInputs = with self; [];
628 buildInputs = with self; [];
629 doCheck = false;
629 doCheck = false;
630 propagatedBuildInputs = with self; [urllib3];
630 propagatedBuildInputs = with self; [urllib3];
631 src = fetchurl {
631 src = fetchurl {
632 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
632 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
633 md5 = "2550f3b51629cf1ef9636608af92c340";
633 md5 = "2550f3b51629cf1ef9636608af92c340";
634 };
634 };
635 meta = {
635 meta = {
636 license = [ pkgs.lib.licenses.asl20 ];
636 license = [ pkgs.lib.licenses.asl20 ];
637 };
637 };
638 };
638 };
639 elasticsearch-dsl = super.buildPythonPackage {
639 elasticsearch-dsl = super.buildPythonPackage {
640 name = "elasticsearch-dsl-2.0.0";
640 name = "elasticsearch-dsl-2.0.0";
641 buildInputs = with self; [];
641 buildInputs = with self; [];
642 doCheck = false;
642 doCheck = false;
643 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
643 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
644 src = fetchurl {
644 src = fetchurl {
645 url = "https://pypi.python.org/packages/4e/5d/e788ae8dbe2ff4d13426db0a027533386a5c276c77a2654dc0e2007ce04a/elasticsearch-dsl-2.0.0.tar.gz";
645 url = "https://pypi.python.org/packages/4e/5d/e788ae8dbe2ff4d13426db0a027533386a5c276c77a2654dc0e2007ce04a/elasticsearch-dsl-2.0.0.tar.gz";
646 md5 = "4cdfec81bb35383dd3b7d02d7dc5ee68";
646 md5 = "4cdfec81bb35383dd3b7d02d7dc5ee68";
647 };
647 };
648 meta = {
648 meta = {
649 license = [ pkgs.lib.licenses.asl20 ];
649 license = [ pkgs.lib.licenses.asl20 ];
650 };
650 };
651 };
651 };
652 flake8 = super.buildPythonPackage {
652 flake8 = super.buildPythonPackage {
653 name = "flake8-2.4.1";
653 name = "flake8-2.4.1";
654 buildInputs = with self; [];
654 buildInputs = with self; [];
655 doCheck = false;
655 doCheck = false;
656 propagatedBuildInputs = with self; [pyflakes pep8 mccabe];
656 propagatedBuildInputs = with self; [pyflakes pep8 mccabe];
657 src = fetchurl {
657 src = fetchurl {
658 url = "https://pypi.python.org/packages/8f/b5/9a73c66c7dba273bac8758398f060c008a25f3e84531063b42503b5d0a95/flake8-2.4.1.tar.gz";
658 url = "https://pypi.python.org/packages/8f/b5/9a73c66c7dba273bac8758398f060c008a25f3e84531063b42503b5d0a95/flake8-2.4.1.tar.gz";
659 md5 = "ed45d3db81a3b7c88bd63c6e37ca1d65";
659 md5 = "ed45d3db81a3b7c88bd63c6e37ca1d65";
660 };
660 };
661 meta = {
661 meta = {
662 license = [ pkgs.lib.licenses.mit ];
662 license = [ pkgs.lib.licenses.mit ];
663 };
663 };
664 };
664 };
665 future = super.buildPythonPackage {
665 future = super.buildPythonPackage {
666 name = "future-0.14.3";
666 name = "future-0.14.3";
667 buildInputs = with self; [];
667 buildInputs = with self; [];
668 doCheck = false;
668 doCheck = false;
669 propagatedBuildInputs = with self; [];
669 propagatedBuildInputs = with self; [];
670 src = fetchurl {
670 src = fetchurl {
671 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
671 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
672 md5 = "e94079b0bd1fc054929e8769fc0f6083";
672 md5 = "e94079b0bd1fc054929e8769fc0f6083";
673 };
673 };
674 meta = {
674 meta = {
675 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
675 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
676 };
676 };
677 };
677 };
678 futures = super.buildPythonPackage {
678 futures = super.buildPythonPackage {
679 name = "futures-3.0.2";
679 name = "futures-3.0.2";
680 buildInputs = with self; [];
680 buildInputs = with self; [];
681 doCheck = false;
681 doCheck = false;
682 propagatedBuildInputs = with self; [];
682 propagatedBuildInputs = with self; [];
683 src = fetchurl {
683 src = fetchurl {
684 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
684 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
685 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
685 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
686 };
686 };
687 meta = {
687 meta = {
688 license = [ pkgs.lib.licenses.bsdOriginal ];
688 license = [ pkgs.lib.licenses.bsdOriginal ];
689 };
689 };
690 };
690 };
691 gnureadline = super.buildPythonPackage {
691 gnureadline = super.buildPythonPackage {
692 name = "gnureadline-6.3.3";
692 name = "gnureadline-6.3.3";
693 buildInputs = with self; [];
693 buildInputs = with self; [];
694 doCheck = false;
694 doCheck = false;
695 propagatedBuildInputs = with self; [];
695 propagatedBuildInputs = with self; [];
696 src = fetchurl {
696 src = fetchurl {
697 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
697 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
698 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
698 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
699 };
699 };
700 meta = {
700 meta = {
701 license = [ pkgs.lib.licenses.gpl1 ];
701 license = [ pkgs.lib.licenses.gpl1 ];
702 };
702 };
703 };
703 };
704 gprof2dot = super.buildPythonPackage {
704 gprof2dot = super.buildPythonPackage {
705 name = "gprof2dot-2015.12.1";
705 name = "gprof2dot-2015.12.1";
706 buildInputs = with self; [];
706 buildInputs = with self; [];
707 doCheck = false;
707 doCheck = false;
708 propagatedBuildInputs = with self; [];
708 propagatedBuildInputs = with self; [];
709 src = fetchurl {
709 src = fetchurl {
710 url = "https://pypi.python.org/packages/b9/34/7bf93c1952d40fa5c95ad963f4d8344b61ef58558632402eca18e6c14127/gprof2dot-2015.12.1.tar.gz";
710 url = "https://pypi.python.org/packages/b9/34/7bf93c1952d40fa5c95ad963f4d8344b61ef58558632402eca18e6c14127/gprof2dot-2015.12.1.tar.gz";
711 md5 = "e23bf4e2f94db032750c193384b4165b";
711 md5 = "e23bf4e2f94db032750c193384b4165b";
712 };
712 };
713 meta = {
713 meta = {
714 license = [ { fullName = "LGPL"; } ];
714 license = [ { fullName = "LGPL"; } ];
715 };
715 };
716 };
716 };
717 gunicorn = super.buildPythonPackage {
717 gunicorn = super.buildPythonPackage {
718 name = "gunicorn-19.6.0";
718 name = "gunicorn-19.6.0";
719 buildInputs = with self; [];
719 buildInputs = with self; [];
720 doCheck = false;
720 doCheck = false;
721 propagatedBuildInputs = with self; [];
721 propagatedBuildInputs = with self; [];
722 src = fetchurl {
722 src = fetchurl {
723 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
723 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
724 md5 = "338e5e8a83ea0f0625f768dba4597530";
724 md5 = "338e5e8a83ea0f0625f768dba4597530";
725 };
725 };
726 meta = {
726 meta = {
727 license = [ pkgs.lib.licenses.mit ];
727 license = [ pkgs.lib.licenses.mit ];
728 };
728 };
729 };
729 };
730 infrae.cache = super.buildPythonPackage {
730 infrae.cache = super.buildPythonPackage {
731 name = "infrae.cache-1.0.1";
731 name = "infrae.cache-1.0.1";
732 buildInputs = with self; [];
732 buildInputs = with self; [];
733 doCheck = false;
733 doCheck = false;
734 propagatedBuildInputs = with self; [Beaker repoze.lru];
734 propagatedBuildInputs = with self; [Beaker repoze.lru];
735 src = fetchurl {
735 src = fetchurl {
736 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
736 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
737 md5 = "b09076a766747e6ed2a755cc62088e32";
737 md5 = "b09076a766747e6ed2a755cc62088e32";
738 };
738 };
739 meta = {
739 meta = {
740 license = [ pkgs.lib.licenses.zpt21 ];
740 license = [ pkgs.lib.licenses.zpt21 ];
741 };
741 };
742 };
742 };
743 invoke = super.buildPythonPackage {
743 invoke = super.buildPythonPackage {
744 name = "invoke-0.13.0";
744 name = "invoke-0.13.0";
745 buildInputs = with self; [];
745 buildInputs = with self; [];
746 doCheck = false;
746 doCheck = false;
747 propagatedBuildInputs = with self; [];
747 propagatedBuildInputs = with self; [];
748 src = fetchurl {
748 src = fetchurl {
749 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
749 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
750 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
750 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
751 };
751 };
752 meta = {
752 meta = {
753 license = [ pkgs.lib.licenses.bsdOriginal ];
753 license = [ pkgs.lib.licenses.bsdOriginal ];
754 };
754 };
755 };
755 };
756 ipdb = super.buildPythonPackage {
756 ipdb = super.buildPythonPackage {
757 name = "ipdb-0.8";
757 name = "ipdb-0.8";
758 buildInputs = with self; [];
758 buildInputs = with self; [];
759 doCheck = false;
759 doCheck = false;
760 propagatedBuildInputs = with self; [ipython];
760 propagatedBuildInputs = with self; [ipython];
761 src = fetchurl {
761 src = fetchurl {
762 url = "https://pypi.python.org/packages/f0/25/d7dd430ced6cd8dc242a933c8682b5dbf32eb4011d82f87e34209e5ec845/ipdb-0.8.zip";
762 url = "https://pypi.python.org/packages/f0/25/d7dd430ced6cd8dc242a933c8682b5dbf32eb4011d82f87e34209e5ec845/ipdb-0.8.zip";
763 md5 = "96dca0712efa01aa5eaf6b22071dd3ed";
763 md5 = "96dca0712efa01aa5eaf6b22071dd3ed";
764 };
764 };
765 meta = {
765 meta = {
766 license = [ pkgs.lib.licenses.gpl1 ];
766 license = [ pkgs.lib.licenses.gpl1 ];
767 };
767 };
768 };
768 };
769 ipython = super.buildPythonPackage {
769 ipython = super.buildPythonPackage {
770 name = "ipython-3.1.0";
770 name = "ipython-3.1.0";
771 buildInputs = with self; [];
771 buildInputs = with self; [];
772 doCheck = false;
772 doCheck = false;
773 propagatedBuildInputs = with self; [];
773 propagatedBuildInputs = with self; [];
774 src = fetchurl {
774 src = fetchurl {
775 url = "https://pypi.python.org/packages/06/91/120c0835254c120af89f066afaabf81289bc2726c1fc3ca0555df6882f58/ipython-3.1.0.tar.gz";
775 url = "https://pypi.python.org/packages/06/91/120c0835254c120af89f066afaabf81289bc2726c1fc3ca0555df6882f58/ipython-3.1.0.tar.gz";
776 md5 = "a749d90c16068687b0ec45a27e72ef8f";
776 md5 = "a749d90c16068687b0ec45a27e72ef8f";
777 };
777 };
778 meta = {
778 meta = {
779 license = [ pkgs.lib.licenses.bsdOriginal ];
779 license = [ pkgs.lib.licenses.bsdOriginal ];
780 };
780 };
781 };
781 };
782 iso8601 = super.buildPythonPackage {
782 iso8601 = super.buildPythonPackage {
783 name = "iso8601-0.1.11";
783 name = "iso8601-0.1.11";
784 buildInputs = with self; [];
784 buildInputs = with self; [];
785 doCheck = false;
785 doCheck = false;
786 propagatedBuildInputs = with self; [];
786 propagatedBuildInputs = with self; [];
787 src = fetchurl {
787 src = fetchurl {
788 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
788 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
789 md5 = "b06d11cd14a64096f907086044f0fe38";
789 md5 = "b06d11cd14a64096f907086044f0fe38";
790 };
790 };
791 meta = {
791 meta = {
792 license = [ pkgs.lib.licenses.mit ];
792 license = [ pkgs.lib.licenses.mit ];
793 };
793 };
794 };
794 };
795 itsdangerous = super.buildPythonPackage {
795 itsdangerous = super.buildPythonPackage {
796 name = "itsdangerous-0.24";
796 name = "itsdangerous-0.24";
797 buildInputs = with self; [];
797 buildInputs = with self; [];
798 doCheck = false;
798 doCheck = false;
799 propagatedBuildInputs = with self; [];
799 propagatedBuildInputs = with self; [];
800 src = fetchurl {
800 src = fetchurl {
801 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
801 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
802 md5 = "a3d55aa79369aef5345c036a8a26307f";
802 md5 = "a3d55aa79369aef5345c036a8a26307f";
803 };
803 };
804 meta = {
804 meta = {
805 license = [ pkgs.lib.licenses.bsdOriginal ];
805 license = [ pkgs.lib.licenses.bsdOriginal ];
806 };
806 };
807 };
807 };
808 kombu = super.buildPythonPackage {
808 kombu = super.buildPythonPackage {
809 name = "kombu-1.5.1";
809 name = "kombu-1.5.1";
810 buildInputs = with self; [];
810 buildInputs = with self; [];
811 doCheck = false;
811 doCheck = false;
812 propagatedBuildInputs = with self; [anyjson amqplib];
812 propagatedBuildInputs = with self; [anyjson amqplib];
813 src = fetchurl {
813 src = fetchurl {
814 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
814 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
815 md5 = "50662f3c7e9395b3d0721fb75d100b63";
815 md5 = "50662f3c7e9395b3d0721fb75d100b63";
816 };
816 };
817 meta = {
817 meta = {
818 license = [ pkgs.lib.licenses.bsdOriginal ];
818 license = [ pkgs.lib.licenses.bsdOriginal ];
819 };
819 };
820 };
820 };
821 lxml = super.buildPythonPackage {
821 lxml = super.buildPythonPackage {
822 name = "lxml-3.4.4";
822 name = "lxml-3.4.4";
823 buildInputs = with self; [];
823 buildInputs = with self; [];
824 doCheck = false;
824 doCheck = false;
825 propagatedBuildInputs = with self; [];
825 propagatedBuildInputs = with self; [];
826 src = fetchurl {
826 src = fetchurl {
827 url = "https://pypi.python.org/packages/63/c7/4f2a2a4ad6c6fa99b14be6b3c1cece9142e2d915aa7c43c908677afc8fa4/lxml-3.4.4.tar.gz";
827 url = "https://pypi.python.org/packages/63/c7/4f2a2a4ad6c6fa99b14be6b3c1cece9142e2d915aa7c43c908677afc8fa4/lxml-3.4.4.tar.gz";
828 md5 = "a9a65972afc173ec7a39c585f4eea69c";
828 md5 = "a9a65972afc173ec7a39c585f4eea69c";
829 };
829 };
830 meta = {
830 meta = {
831 license = [ pkgs.lib.licenses.bsdOriginal ];
831 license = [ pkgs.lib.licenses.bsdOriginal ];
832 };
832 };
833 };
833 };
834 marshmallow = super.buildPythonPackage {
835 name = "marshmallow-2.8.0";
836 buildInputs = with self; [];
837 doCheck = false;
838 propagatedBuildInputs = with self; [];
839 src = fetchurl {
840 url = "https://pypi.python.org/packages/4f/64/9393d77847d86981c84b88bbea627d30ff71b5ab1402636b366f73737817/marshmallow-2.8.0.tar.gz";
841 md5 = "204513fc123a3d9bdd7b63b9747f02e6";
842 };
843 meta = {
844 license = [ pkgs.lib.licenses.mit ];
845 };
846 };
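
The entry above pins marshmallow 2.8.0 into this Python package set; the same name is also appended to the application's propagatedBuildInputs further down in this diff. As a point of reference only, the sketch below shows the marshmallow 2.x serialization style that this pinned release provides. It is a minimal, self-contained example with hypothetical class and field names, not code taken from this repository.

# Illustrative marshmallow 2.x usage (hypothetical names, not repository code).
from datetime import datetime

from marshmallow import Schema, fields


class ItemSchema(Schema):
    # Declarative fields describe how attributes are serialized.
    name = fields.Str()
    created_on = fields.DateTime()  # rendered as an ISO-8601 string


class Item(object):
    def __init__(self, name):
        self.name = name
        self.created_on = datetime.utcnow()


# In marshmallow 2.x, dump() returns a MarshalResult namedtuple;
# .data holds the serialized dict and .errors any validation problems.
result = ItemSchema().dump(Item('example'))
print(result.data)  # e.g. {'name': 'example', 'created_on': '2016-...'}
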
834 mccabe = super.buildPythonPackage {
847 mccabe = super.buildPythonPackage {
835 name = "mccabe-0.3";
848 name = "mccabe-0.3";
836 buildInputs = with self; [];
849 buildInputs = with self; [];
837 doCheck = false;
850 doCheck = false;
838 propagatedBuildInputs = with self; [];
851 propagatedBuildInputs = with self; [];
839 src = fetchurl {
852 src = fetchurl {
840 url = "https://pypi.python.org/packages/c9/2e/75231479e11a906b64ac43bad9d0bb534d00080b18bdca8db9da46e1faf7/mccabe-0.3.tar.gz";
853 url = "https://pypi.python.org/packages/c9/2e/75231479e11a906b64ac43bad9d0bb534d00080b18bdca8db9da46e1faf7/mccabe-0.3.tar.gz";
841 md5 = "81640948ff226f8c12b3277059489157";
854 md5 = "81640948ff226f8c12b3277059489157";
842 };
855 };
843 meta = {
856 meta = {
844 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
857 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
845 };
858 };
846 };
859 };
847 meld3 = super.buildPythonPackage {
860 meld3 = super.buildPythonPackage {
848 name = "meld3-1.0.2";
861 name = "meld3-1.0.2";
849 buildInputs = with self; [];
862 buildInputs = with self; [];
850 doCheck = false;
863 doCheck = false;
851 propagatedBuildInputs = with self; [];
864 propagatedBuildInputs = with self; [];
852 src = fetchurl {
865 src = fetchurl {
853 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
866 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
854 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
867 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
855 };
868 };
856 meta = {
869 meta = {
857 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
870 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
858 };
871 };
859 };
872 };
860 mock = super.buildPythonPackage {
873 mock = super.buildPythonPackage {
861 name = "mock-1.0.1";
874 name = "mock-1.0.1";
862 buildInputs = with self; [];
875 buildInputs = with self; [];
863 doCheck = false;
876 doCheck = false;
864 propagatedBuildInputs = with self; [];
877 propagatedBuildInputs = with self; [];
865 src = fetchurl {
878 src = fetchurl {
866 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
879 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
867 md5 = "869f08d003c289a97c1a6610faf5e913";
880 md5 = "869f08d003c289a97c1a6610faf5e913";
868 };
881 };
869 meta = {
882 meta = {
870 license = [ pkgs.lib.licenses.bsdOriginal ];
883 license = [ pkgs.lib.licenses.bsdOriginal ];
871 };
884 };
872 };
885 };
873 msgpack-python = super.buildPythonPackage {
886 msgpack-python = super.buildPythonPackage {
874 name = "msgpack-python-0.4.6";
887 name = "msgpack-python-0.4.6";
875 buildInputs = with self; [];
888 buildInputs = with self; [];
876 doCheck = false;
889 doCheck = false;
877 propagatedBuildInputs = with self; [];
890 propagatedBuildInputs = with self; [];
878 src = fetchurl {
891 src = fetchurl {
879 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
892 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
880 md5 = "8b317669314cf1bc881716cccdaccb30";
893 md5 = "8b317669314cf1bc881716cccdaccb30";
881 };
894 };
882 meta = {
895 meta = {
883 license = [ pkgs.lib.licenses.asl20 ];
896 license = [ pkgs.lib.licenses.asl20 ];
884 };
897 };
885 };
898 };
886 nose = super.buildPythonPackage {
899 nose = super.buildPythonPackage {
887 name = "nose-1.3.6";
900 name = "nose-1.3.6";
888 buildInputs = with self; [];
901 buildInputs = with self; [];
889 doCheck = false;
902 doCheck = false;
890 propagatedBuildInputs = with self; [];
903 propagatedBuildInputs = with self; [];
891 src = fetchurl {
904 src = fetchurl {
892 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
905 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
893 md5 = "0ca546d81ca8309080fc80cb389e7a16";
906 md5 = "0ca546d81ca8309080fc80cb389e7a16";
894 };
907 };
895 meta = {
908 meta = {
896 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
909 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
897 };
910 };
898 };
911 };
899 objgraph = super.buildPythonPackage {
912 objgraph = super.buildPythonPackage {
900 name = "objgraph-2.0.0";
913 name = "objgraph-2.0.0";
901 buildInputs = with self; [];
914 buildInputs = with self; [];
902 doCheck = false;
915 doCheck = false;
903 propagatedBuildInputs = with self; [];
916 propagatedBuildInputs = with self; [];
904 src = fetchurl {
917 src = fetchurl {
905 url = "https://pypi.python.org/packages/d7/33/ace750b59247496ed769b170586c5def7202683f3d98e737b75b767ff29e/objgraph-2.0.0.tar.gz";
918 url = "https://pypi.python.org/packages/d7/33/ace750b59247496ed769b170586c5def7202683f3d98e737b75b767ff29e/objgraph-2.0.0.tar.gz";
906 md5 = "25b0d5e5adc74aa63ead15699614159c";
919 md5 = "25b0d5e5adc74aa63ead15699614159c";
907 };
920 };
908 meta = {
921 meta = {
909 license = [ pkgs.lib.licenses.mit ];
922 license = [ pkgs.lib.licenses.mit ];
910 };
923 };
911 };
924 };
912 packaging = super.buildPythonPackage {
925 packaging = super.buildPythonPackage {
913 name = "packaging-15.2";
926 name = "packaging-15.2";
914 buildInputs = with self; [];
927 buildInputs = with self; [];
915 doCheck = false;
928 doCheck = false;
916 propagatedBuildInputs = with self; [];
929 propagatedBuildInputs = with self; [];
917 src = fetchurl {
930 src = fetchurl {
918 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
931 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
919 md5 = "c16093476f6ced42128bf610e5db3784";
932 md5 = "c16093476f6ced42128bf610e5db3784";
920 };
933 };
921 meta = {
934 meta = {
922 license = [ pkgs.lib.licenses.asl20 ];
935 license = [ pkgs.lib.licenses.asl20 ];
923 };
936 };
924 };
937 };
925 paramiko = super.buildPythonPackage {
938 paramiko = super.buildPythonPackage {
926 name = "paramiko-1.15.1";
939 name = "paramiko-1.15.1";
927 buildInputs = with self; [];
940 buildInputs = with self; [];
928 doCheck = false;
941 doCheck = false;
929 propagatedBuildInputs = with self; [pycrypto ecdsa];
942 propagatedBuildInputs = with self; [pycrypto ecdsa];
930 src = fetchurl {
943 src = fetchurl {
931 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
944 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
932 md5 = "48c274c3f9b1282932567b21f6acf3b5";
945 md5 = "48c274c3f9b1282932567b21f6acf3b5";
933 };
946 };
934 meta = {
947 meta = {
935 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
948 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
936 };
949 };
937 };
950 };
938 pep8 = super.buildPythonPackage {
951 pep8 = super.buildPythonPackage {
939 name = "pep8-1.5.7";
952 name = "pep8-1.5.7";
940 buildInputs = with self; [];
953 buildInputs = with self; [];
941 doCheck = false;
954 doCheck = false;
942 propagatedBuildInputs = with self; [];
955 propagatedBuildInputs = with self; [];
943 src = fetchurl {
956 src = fetchurl {
944 url = "https://pypi.python.org/packages/8b/de/259f5e735897ada1683489dd514b2a1c91aaa74e5e6b68f80acf128a6368/pep8-1.5.7.tar.gz";
957 url = "https://pypi.python.org/packages/8b/de/259f5e735897ada1683489dd514b2a1c91aaa74e5e6b68f80acf128a6368/pep8-1.5.7.tar.gz";
945 md5 = "f6adbdd69365ecca20513c709f9b7c93";
958 md5 = "f6adbdd69365ecca20513c709f9b7c93";
946 };
959 };
947 meta = {
960 meta = {
948 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
961 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
949 };
962 };
950 };
963 };
951 psutil = super.buildPythonPackage {
964 psutil = super.buildPythonPackage {
952 name = "psutil-2.2.1";
965 name = "psutil-2.2.1";
953 buildInputs = with self; [];
966 buildInputs = with self; [];
954 doCheck = false;
967 doCheck = false;
955 propagatedBuildInputs = with self; [];
968 propagatedBuildInputs = with self; [];
956 src = fetchurl {
969 src = fetchurl {
957 url = "https://pypi.python.org/packages/df/47/ee54ef14dd40f8ce831a7581001a5096494dc99fe71586260ca6b531fe86/psutil-2.2.1.tar.gz";
970 url = "https://pypi.python.org/packages/df/47/ee54ef14dd40f8ce831a7581001a5096494dc99fe71586260ca6b531fe86/psutil-2.2.1.tar.gz";
958 md5 = "1a2b58cd9e3a53528bb6148f0c4d5244";
971 md5 = "1a2b58cd9e3a53528bb6148f0c4d5244";
959 };
972 };
960 meta = {
973 meta = {
961 license = [ pkgs.lib.licenses.bsdOriginal ];
974 license = [ pkgs.lib.licenses.bsdOriginal ];
962 };
975 };
963 };
976 };
964 psycopg2 = super.buildPythonPackage {
977 psycopg2 = super.buildPythonPackage {
965 name = "psycopg2-2.6.1";
978 name = "psycopg2-2.6.1";
966 buildInputs = with self; [];
979 buildInputs = with self; [];
967 doCheck = false;
980 doCheck = false;
968 propagatedBuildInputs = with self; [];
981 propagatedBuildInputs = with self; [];
969 src = fetchurl {
982 src = fetchurl {
970 url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz";
983 url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz";
971 md5 = "842b44f8c95517ed5b792081a2370da1";
984 md5 = "842b44f8c95517ed5b792081a2370da1";
972 };
985 };
973 meta = {
986 meta = {
974 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
987 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
975 };
988 };
976 };
989 };
977 py = super.buildPythonPackage {
990 py = super.buildPythonPackage {
978 name = "py-1.4.29";
991 name = "py-1.4.29";
979 buildInputs = with self; [];
992 buildInputs = with self; [];
980 doCheck = false;
993 doCheck = false;
981 propagatedBuildInputs = with self; [];
994 propagatedBuildInputs = with self; [];
982 src = fetchurl {
995 src = fetchurl {
983 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
996 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
984 md5 = "c28e0accba523a29b35a48bb703fb96c";
997 md5 = "c28e0accba523a29b35a48bb703fb96c";
985 };
998 };
986 meta = {
999 meta = {
987 license = [ pkgs.lib.licenses.mit ];
1000 license = [ pkgs.lib.licenses.mit ];
988 };
1001 };
989 };
1002 };
990 py-bcrypt = super.buildPythonPackage {
1003 py-bcrypt = super.buildPythonPackage {
991 name = "py-bcrypt-0.4";
1004 name = "py-bcrypt-0.4";
992 buildInputs = with self; [];
1005 buildInputs = with self; [];
993 doCheck = false;
1006 doCheck = false;
994 propagatedBuildInputs = with self; [];
1007 propagatedBuildInputs = with self; [];
995 src = fetchurl {
1008 src = fetchurl {
996 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1009 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
997 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
1010 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
998 };
1011 };
999 meta = {
1012 meta = {
1000 license = [ pkgs.lib.licenses.bsdOriginal ];
1013 license = [ pkgs.lib.licenses.bsdOriginal ];
1001 };
1014 };
1002 };
1015 };
1003 py-gfm = super.buildPythonPackage {
1016 py-gfm = super.buildPythonPackage {
1004 name = "py-gfm-0.1.3";
1017 name = "py-gfm-0.1.3";
1005 buildInputs = with self; [];
1018 buildInputs = with self; [];
1006 doCheck = false;
1019 doCheck = false;
1007 propagatedBuildInputs = with self; [setuptools Markdown];
1020 propagatedBuildInputs = with self; [setuptools Markdown];
1008 src = fetchurl {
1021 src = fetchurl {
1009 url = "https://pypi.python.org/packages/12/e4/6b3d8678da04f97d7490d8264d8de51c2dc9fb91209ccee9c515c95e14c5/py-gfm-0.1.3.tar.gz";
1022 url = "https://pypi.python.org/packages/12/e4/6b3d8678da04f97d7490d8264d8de51c2dc9fb91209ccee9c515c95e14c5/py-gfm-0.1.3.tar.gz";
1010 md5 = "e588d9e69640a241b97e2c59c22527a6";
1023 md5 = "e588d9e69640a241b97e2c59c22527a6";
1011 };
1024 };
1012 meta = {
1025 meta = {
1013 license = [ pkgs.lib.licenses.bsdOriginal ];
1026 license = [ pkgs.lib.licenses.bsdOriginal ];
1014 };
1027 };
1015 };
1028 };
1016 pycrypto = super.buildPythonPackage {
1029 pycrypto = super.buildPythonPackage {
1017 name = "pycrypto-2.6.1";
1030 name = "pycrypto-2.6.1";
1018 buildInputs = with self; [];
1031 buildInputs = with self; [];
1019 doCheck = false;
1032 doCheck = false;
1020 propagatedBuildInputs = with self; [];
1033 propagatedBuildInputs = with self; [];
1021 src = fetchurl {
1034 src = fetchurl {
1022 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1035 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1023 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1036 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1024 };
1037 };
1025 meta = {
1038 meta = {
1026 license = [ pkgs.lib.licenses.publicDomain ];
1039 license = [ pkgs.lib.licenses.publicDomain ];
1027 };
1040 };
1028 };
1041 };
1029 pycurl = super.buildPythonPackage {
1042 pycurl = super.buildPythonPackage {
1030 name = "pycurl-7.19.5";
1043 name = "pycurl-7.19.5";
1031 buildInputs = with self; [];
1044 buildInputs = with self; [];
1032 doCheck = false;
1045 doCheck = false;
1033 propagatedBuildInputs = with self; [];
1046 propagatedBuildInputs = with self; [];
1034 src = fetchurl {
1047 src = fetchurl {
1035 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1048 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1036 md5 = "47b4eac84118e2606658122104e62072";
1049 md5 = "47b4eac84118e2606658122104e62072";
1037 };
1050 };
1038 meta = {
1051 meta = {
1039 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1052 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1040 };
1053 };
1041 };
1054 };
1042 pyflakes = super.buildPythonPackage {
1055 pyflakes = super.buildPythonPackage {
1043 name = "pyflakes-0.8.1";
1056 name = "pyflakes-0.8.1";
1044 buildInputs = with self; [];
1057 buildInputs = with self; [];
1045 doCheck = false;
1058 doCheck = false;
1046 propagatedBuildInputs = with self; [];
1059 propagatedBuildInputs = with self; [];
1047 src = fetchurl {
1060 src = fetchurl {
1048 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1061 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1049 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1062 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1050 };
1063 };
1051 meta = {
1064 meta = {
1052 license = [ pkgs.lib.licenses.mit ];
1065 license = [ pkgs.lib.licenses.mit ];
1053 };
1066 };
1054 };
1067 };
1055 pyparsing = super.buildPythonPackage {
1068 pyparsing = super.buildPythonPackage {
1056 name = "pyparsing-1.5.7";
1069 name = "pyparsing-1.5.7";
1057 buildInputs = with self; [];
1070 buildInputs = with self; [];
1058 doCheck = false;
1071 doCheck = false;
1059 propagatedBuildInputs = with self; [];
1072 propagatedBuildInputs = with self; [];
1060 src = fetchurl {
1073 src = fetchurl {
1061 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1074 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1062 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1075 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1063 };
1076 };
1064 meta = {
1077 meta = {
1065 license = [ pkgs.lib.licenses.mit ];
1078 license = [ pkgs.lib.licenses.mit ];
1066 };
1079 };
1067 };
1080 };
1068 pyramid = super.buildPythonPackage {
1081 pyramid = super.buildPythonPackage {
1069 name = "pyramid-1.6.1";
1082 name = "pyramid-1.6.1";
1070 buildInputs = with self; [];
1083 buildInputs = with self; [];
1071 doCheck = false;
1084 doCheck = false;
1072 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
1085 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
1073 src = fetchurl {
1086 src = fetchurl {
1074 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
1087 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
1075 md5 = "b18688ff3cc33efdbb098a35b45dd122";
1088 md5 = "b18688ff3cc33efdbb098a35b45dd122";
1076 };
1089 };
1077 meta = {
1090 meta = {
1078 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1091 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1079 };
1092 };
1080 };
1093 };
1081 pyramid-beaker = super.buildPythonPackage {
1094 pyramid-beaker = super.buildPythonPackage {
1082 name = "pyramid-beaker-0.8";
1095 name = "pyramid-beaker-0.8";
1083 buildInputs = with self; [];
1096 buildInputs = with self; [];
1084 doCheck = false;
1097 doCheck = false;
1085 propagatedBuildInputs = with self; [pyramid Beaker];
1098 propagatedBuildInputs = with self; [pyramid Beaker];
1086 src = fetchurl {
1099 src = fetchurl {
1087 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1100 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1088 md5 = "22f14be31b06549f80890e2c63a93834";
1101 md5 = "22f14be31b06549f80890e2c63a93834";
1089 };
1102 };
1090 meta = {
1103 meta = {
1091 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1104 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1092 };
1105 };
1093 };
1106 };
1094 pyramid-debugtoolbar = super.buildPythonPackage {
1107 pyramid-debugtoolbar = super.buildPythonPackage {
1095 name = "pyramid-debugtoolbar-2.4.2";
1108 name = "pyramid-debugtoolbar-2.4.2";
1096 buildInputs = with self; [];
1109 buildInputs = with self; [];
1097 doCheck = false;
1110 doCheck = false;
1098 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1111 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1099 src = fetchurl {
1112 src = fetchurl {
1100 url = "https://pypi.python.org/packages/89/00/ed5426ee41ed747ba3ffd30e8230841a6878286ea67d480b1444d24f06a2/pyramid_debugtoolbar-2.4.2.tar.gz";
1113 url = "https://pypi.python.org/packages/89/00/ed5426ee41ed747ba3ffd30e8230841a6878286ea67d480b1444d24f06a2/pyramid_debugtoolbar-2.4.2.tar.gz";
1101 md5 = "073ea67086cc4bd5decc3a000853642d";
1114 md5 = "073ea67086cc4bd5decc3a000853642d";
1102 };
1115 };
1103 meta = {
1116 meta = {
1104 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1117 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1105 };
1118 };
1106 };
1119 };
1107 pyramid-jinja2 = super.buildPythonPackage {
1120 pyramid-jinja2 = super.buildPythonPackage {
1108 name = "pyramid-jinja2-2.5";
1121 name = "pyramid-jinja2-2.5";
1109 buildInputs = with self; [];
1122 buildInputs = with self; [];
1110 doCheck = false;
1123 doCheck = false;
1111 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1124 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1112 src = fetchurl {
1125 src = fetchurl {
1113 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1126 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1114 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1127 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1115 };
1128 };
1116 meta = {
1129 meta = {
1117 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1130 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1118 };
1131 };
1119 };
1132 };
1120 pyramid-mako = super.buildPythonPackage {
1133 pyramid-mako = super.buildPythonPackage {
1121 name = "pyramid-mako-1.0.2";
1134 name = "pyramid-mako-1.0.2";
1122 buildInputs = with self; [];
1135 buildInputs = with self; [];
1123 doCheck = false;
1136 doCheck = false;
1124 propagatedBuildInputs = with self; [pyramid Mako];
1137 propagatedBuildInputs = with self; [pyramid Mako];
1125 src = fetchurl {
1138 src = fetchurl {
1126 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1139 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1127 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1140 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1128 };
1141 };
1129 meta = {
1142 meta = {
1130 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1143 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1131 };
1144 };
1132 };
1145 };
1133 pysqlite = super.buildPythonPackage {
1146 pysqlite = super.buildPythonPackage {
1134 name = "pysqlite-2.6.3";
1147 name = "pysqlite-2.6.3";
1135 buildInputs = with self; [];
1148 buildInputs = with self; [];
1136 doCheck = false;
1149 doCheck = false;
1137 propagatedBuildInputs = with self; [];
1150 propagatedBuildInputs = with self; [];
1138 src = fetchurl {
1151 src = fetchurl {
1139 url = "https://pypi.python.org/packages/5c/a6/1c429cd4c8069cf4bfbd0eb4d592b3f4042155a8202df83d7e9b93aa3dc2/pysqlite-2.6.3.tar.gz";
1152 url = "https://pypi.python.org/packages/5c/a6/1c429cd4c8069cf4bfbd0eb4d592b3f4042155a8202df83d7e9b93aa3dc2/pysqlite-2.6.3.tar.gz";
1140 md5 = "7ff1cedee74646b50117acff87aa1cfa";
1153 md5 = "7ff1cedee74646b50117acff87aa1cfa";
1141 };
1154 };
1142 meta = {
1155 meta = {
1143 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1156 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1144 };
1157 };
1145 };
1158 };
1146 pytest = super.buildPythonPackage {
1159 pytest = super.buildPythonPackage {
1147 name = "pytest-2.8.5";
1160 name = "pytest-2.8.5";
1148 buildInputs = with self; [];
1161 buildInputs = with self; [];
1149 doCheck = false;
1162 doCheck = false;
1150 propagatedBuildInputs = with self; [py];
1163 propagatedBuildInputs = with self; [py];
1151 src = fetchurl {
1164 src = fetchurl {
1152 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
1165 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
1153 md5 = "8493b06f700862f1294298d6c1b715a9";
1166 md5 = "8493b06f700862f1294298d6c1b715a9";
1154 };
1167 };
1155 meta = {
1168 meta = {
1156 license = [ pkgs.lib.licenses.mit ];
1169 license = [ pkgs.lib.licenses.mit ];
1157 };
1170 };
1158 };
1171 };
1159 pytest-catchlog = super.buildPythonPackage {
1172 pytest-catchlog = super.buildPythonPackage {
1160 name = "pytest-catchlog-1.2.2";
1173 name = "pytest-catchlog-1.2.2";
1161 buildInputs = with self; [];
1174 buildInputs = with self; [];
1162 doCheck = false;
1175 doCheck = false;
1163 propagatedBuildInputs = with self; [py pytest];
1176 propagatedBuildInputs = with self; [py pytest];
1164 src = fetchurl {
1177 src = fetchurl {
1165 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1178 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1166 md5 = "09d890c54c7456c818102b7ff8c182c8";
1179 md5 = "09d890c54c7456c818102b7ff8c182c8";
1167 };
1180 };
1168 meta = {
1181 meta = {
1169 license = [ pkgs.lib.licenses.mit ];
1182 license = [ pkgs.lib.licenses.mit ];
1170 };
1183 };
1171 };
1184 };
1172 pytest-cov = super.buildPythonPackage {
1185 pytest-cov = super.buildPythonPackage {
1173 name = "pytest-cov-1.8.1";
1186 name = "pytest-cov-1.8.1";
1174 buildInputs = with self; [];
1187 buildInputs = with self; [];
1175 doCheck = false;
1188 doCheck = false;
1176 propagatedBuildInputs = with self; [py pytest coverage cov-core];
1189 propagatedBuildInputs = with self; [py pytest coverage cov-core];
1177 src = fetchurl {
1190 src = fetchurl {
1178 url = "https://pypi.python.org/packages/11/4b/b04646e97f1721878eb21e9f779102d84dd044d324382263b1770a3e4838/pytest-cov-1.8.1.tar.gz";
1191 url = "https://pypi.python.org/packages/11/4b/b04646e97f1721878eb21e9f779102d84dd044d324382263b1770a3e4838/pytest-cov-1.8.1.tar.gz";
1179 md5 = "76c778afa2494088270348be42d759fc";
1192 md5 = "76c778afa2494088270348be42d759fc";
1180 };
1193 };
1181 meta = {
1194 meta = {
1182 license = [ pkgs.lib.licenses.mit ];
1195 license = [ pkgs.lib.licenses.mit ];
1183 };
1196 };
1184 };
1197 };
1185 pytest-profiling = super.buildPythonPackage {
1198 pytest-profiling = super.buildPythonPackage {
1186 name = "pytest-profiling-1.0.1";
1199 name = "pytest-profiling-1.0.1";
1187 buildInputs = with self; [];
1200 buildInputs = with self; [];
1188 doCheck = false;
1201 doCheck = false;
1189 propagatedBuildInputs = with self; [six pytest gprof2dot];
1202 propagatedBuildInputs = with self; [six pytest gprof2dot];
1190 src = fetchurl {
1203 src = fetchurl {
1191 url = "https://pypi.python.org/packages/d8/67/8ffab73406e22870e07fa4dc8dce1d7689b26dba8efd00161c9b6fc01ec0/pytest-profiling-1.0.1.tar.gz";
1204 url = "https://pypi.python.org/packages/d8/67/8ffab73406e22870e07fa4dc8dce1d7689b26dba8efd00161c9b6fc01ec0/pytest-profiling-1.0.1.tar.gz";
1192 md5 = "354404eb5b3fd4dc5eb7fffbb3d9b68b";
1205 md5 = "354404eb5b3fd4dc5eb7fffbb3d9b68b";
1193 };
1206 };
1194 meta = {
1207 meta = {
1195 license = [ pkgs.lib.licenses.mit ];
1208 license = [ pkgs.lib.licenses.mit ];
1196 };
1209 };
1197 };
1210 };
1198 pytest-runner = super.buildPythonPackage {
1211 pytest-runner = super.buildPythonPackage {
1199 name = "pytest-runner-2.7.1";
1212 name = "pytest-runner-2.7.1";
1200 buildInputs = with self; [];
1213 buildInputs = with self; [];
1201 doCheck = false;
1214 doCheck = false;
1202 propagatedBuildInputs = with self; [];
1215 propagatedBuildInputs = with self; [];
1203 src = fetchurl {
1216 src = fetchurl {
1204 url = "https://pypi.python.org/packages/99/6b/c4ff4418d3424d4475b7af60724fd4a5cdd91ed8e489dc9443281f0052bc/pytest-runner-2.7.1.tar.gz";
1217 url = "https://pypi.python.org/packages/99/6b/c4ff4418d3424d4475b7af60724fd4a5cdd91ed8e489dc9443281f0052bc/pytest-runner-2.7.1.tar.gz";
1205 md5 = "e56f0bc8d79a6bd91772b44ef4215c7e";
1218 md5 = "e56f0bc8d79a6bd91772b44ef4215c7e";
1206 };
1219 };
1207 meta = {
1220 meta = {
1208 license = [ pkgs.lib.licenses.mit ];
1221 license = [ pkgs.lib.licenses.mit ];
1209 };
1222 };
1210 };
1223 };
1211 pytest-timeout = super.buildPythonPackage {
1224 pytest-timeout = super.buildPythonPackage {
1212 name = "pytest-timeout-0.4";
1225 name = "pytest-timeout-0.4";
1213 buildInputs = with self; [];
1226 buildInputs = with self; [];
1214 doCheck = false;
1227 doCheck = false;
1215 propagatedBuildInputs = with self; [pytest];
1228 propagatedBuildInputs = with self; [pytest];
1216 src = fetchurl {
1229 src = fetchurl {
1217 url = "https://pypi.python.org/packages/24/48/5f6bd4b8026a26e1dd427243d560a29a0f1b24a5c7cffca4bf049a7bb65b/pytest-timeout-0.4.tar.gz";
1230 url = "https://pypi.python.org/packages/24/48/5f6bd4b8026a26e1dd427243d560a29a0f1b24a5c7cffca4bf049a7bb65b/pytest-timeout-0.4.tar.gz";
1218 md5 = "03b28aff69cbbfb959ed35ade5fde262";
1231 md5 = "03b28aff69cbbfb959ed35ade5fde262";
1219 };
1232 };
1220 meta = {
1233 meta = {
1221 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1234 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1222 };
1235 };
1223 };
1236 };
1224 python-dateutil = super.buildPythonPackage {
1237 python-dateutil = super.buildPythonPackage {
1225 name = "python-dateutil-1.5";
1238 name = "python-dateutil-1.5";
1226 buildInputs = with self; [];
1239 buildInputs = with self; [];
1227 doCheck = false;
1240 doCheck = false;
1228 propagatedBuildInputs = with self; [];
1241 propagatedBuildInputs = with self; [];
1229 src = fetchurl {
1242 src = fetchurl {
1230 url = "https://pypi.python.org/packages/b4/7c/df59c89a753eb33c7c44e1dd42de0e9bc2ccdd5a4d576e0bfad97cc280cb/python-dateutil-1.5.tar.gz";
1243 url = "https://pypi.python.org/packages/b4/7c/df59c89a753eb33c7c44e1dd42de0e9bc2ccdd5a4d576e0bfad97cc280cb/python-dateutil-1.5.tar.gz";
1231 md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5";
1244 md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5";
1232 };
1245 };
1233 meta = {
1246 meta = {
1234 license = [ pkgs.lib.licenses.psfl ];
1247 license = [ pkgs.lib.licenses.psfl ];
1235 };
1248 };
1236 };
1249 };
1237 python-editor = super.buildPythonPackage {
1250 python-editor = super.buildPythonPackage {
1238 name = "python-editor-1.0.1";
1251 name = "python-editor-1.0.1";
1239 buildInputs = with self; [];
1252 buildInputs = with self; [];
1240 doCheck = false;
1253 doCheck = false;
1241 propagatedBuildInputs = with self; [];
1254 propagatedBuildInputs = with self; [];
1242 src = fetchurl {
1255 src = fetchurl {
1243 url = "https://pypi.python.org/packages/2b/c0/df7b87d5cf016f82eab3b05cd35f53287c1178ad8c42bfb6fa61b89b22f6/python-editor-1.0.1.tar.gz";
1256 url = "https://pypi.python.org/packages/2b/c0/df7b87d5cf016f82eab3b05cd35f53287c1178ad8c42bfb6fa61b89b22f6/python-editor-1.0.1.tar.gz";
1244 md5 = "e1fa63535b40e022fa4fd646fd8b511a";
1257 md5 = "e1fa63535b40e022fa4fd646fd8b511a";
1245 };
1258 };
1246 meta = {
1259 meta = {
1247 license = [ pkgs.lib.licenses.asl20 ];
1260 license = [ pkgs.lib.licenses.asl20 ];
1248 };
1261 };
1249 };
1262 };
1250 python-ldap = super.buildPythonPackage {
1263 python-ldap = super.buildPythonPackage {
1251 name = "python-ldap-2.4.19";
1264 name = "python-ldap-2.4.19";
1252 buildInputs = with self; [];
1265 buildInputs = with self; [];
1253 doCheck = false;
1266 doCheck = false;
1254 propagatedBuildInputs = with self; [setuptools];
1267 propagatedBuildInputs = with self; [setuptools];
1255 src = fetchurl {
1268 src = fetchurl {
1256 url = "https://pypi.python.org/packages/42/81/1b64838c82e64f14d4e246ff00b52e650a35c012551b891ada2b85d40737/python-ldap-2.4.19.tar.gz";
1269 url = "https://pypi.python.org/packages/42/81/1b64838c82e64f14d4e246ff00b52e650a35c012551b891ada2b85d40737/python-ldap-2.4.19.tar.gz";
1257 md5 = "b941bf31d09739492aa19ef679e94ae3";
1270 md5 = "b941bf31d09739492aa19ef679e94ae3";
1258 };
1271 };
1259 meta = {
1272 meta = {
1260 license = [ pkgs.lib.licenses.psfl ];
1273 license = [ pkgs.lib.licenses.psfl ];
1261 };
1274 };
1262 };
1275 };
1263 python-memcached = super.buildPythonPackage {
1276 python-memcached = super.buildPythonPackage {
1264 name = "python-memcached-1.57";
1277 name = "python-memcached-1.57";
1265 buildInputs = with self; [];
1278 buildInputs = with self; [];
1266 doCheck = false;
1279 doCheck = false;
1267 propagatedBuildInputs = with self; [six];
1280 propagatedBuildInputs = with self; [six];
1268 src = fetchurl {
1281 src = fetchurl {
1269 url = "https://pypi.python.org/packages/52/9d/eebc0dcbc5c7c66840ad207dfc1baa376dadb74912484bff73819cce01e6/python-memcached-1.57.tar.gz";
1282 url = "https://pypi.python.org/packages/52/9d/eebc0dcbc5c7c66840ad207dfc1baa376dadb74912484bff73819cce01e6/python-memcached-1.57.tar.gz";
1270 md5 = "de21f64b42b2d961f3d4ad7beb5468a1";
1283 md5 = "de21f64b42b2d961f3d4ad7beb5468a1";
1271 };
1284 };
1272 meta = {
1285 meta = {
1273 license = [ pkgs.lib.licenses.psfl ];
1286 license = [ pkgs.lib.licenses.psfl ];
1274 };
1287 };
1275 };
1288 };
1276 python-pam = super.buildPythonPackage {
1289 python-pam = super.buildPythonPackage {
1277 name = "python-pam-1.8.2";
1290 name = "python-pam-1.8.2";
1278 buildInputs = with self; [];
1291 buildInputs = with self; [];
1279 doCheck = false;
1292 doCheck = false;
1280 propagatedBuildInputs = with self; [];
1293 propagatedBuildInputs = with self; [];
1281 src = fetchurl {
1294 src = fetchurl {
1282 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1295 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1283 md5 = "db71b6b999246fb05d78ecfbe166629d";
1296 md5 = "db71b6b999246fb05d78ecfbe166629d";
1284 };
1297 };
1285 meta = {
1298 meta = {
1286 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1299 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1287 };
1300 };
1288 };
1301 };
1289 pytz = super.buildPythonPackage {
1302 pytz = super.buildPythonPackage {
1290 name = "pytz-2015.4";
1303 name = "pytz-2015.4";
1291 buildInputs = with self; [];
1304 buildInputs = with self; [];
1292 doCheck = false;
1305 doCheck = false;
1293 propagatedBuildInputs = with self; [];
1306 propagatedBuildInputs = with self; [];
1294 src = fetchurl {
1307 src = fetchurl {
1295 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1308 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1296 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1309 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1297 };
1310 };
1298 meta = {
1311 meta = {
1299 license = [ pkgs.lib.licenses.mit ];
1312 license = [ pkgs.lib.licenses.mit ];
1300 };
1313 };
1301 };
1314 };
1302 pyzmq = super.buildPythonPackage {
1315 pyzmq = super.buildPythonPackage {
1303 name = "pyzmq-14.6.0";
1316 name = "pyzmq-14.6.0";
1304 buildInputs = with self; [];
1317 buildInputs = with self; [];
1305 doCheck = false;
1318 doCheck = false;
1306 propagatedBuildInputs = with self; [];
1319 propagatedBuildInputs = with self; [];
1307 src = fetchurl {
1320 src = fetchurl {
1308 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1321 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1309 md5 = "395b5de95a931afa5b14c9349a5b8024";
1322 md5 = "395b5de95a931afa5b14c9349a5b8024";
1310 };
1323 };
1311 meta = {
1324 meta = {
1312 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1325 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1313 };
1326 };
1314 };
1327 };
1315 recaptcha-client = super.buildPythonPackage {
1328 recaptcha-client = super.buildPythonPackage {
1316 name = "recaptcha-client-1.0.6";
1329 name = "recaptcha-client-1.0.6";
1317 buildInputs = with self; [];
1330 buildInputs = with self; [];
1318 doCheck = false;
1331 doCheck = false;
1319 propagatedBuildInputs = with self; [];
1332 propagatedBuildInputs = with self; [];
1320 src = fetchurl {
1333 src = fetchurl {
1321 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1334 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1322 md5 = "74228180f7e1fb76c4d7089160b0d919";
1335 md5 = "74228180f7e1fb76c4d7089160b0d919";
1323 };
1336 };
1324 meta = {
1337 meta = {
1325 license = [ { fullName = "MIT/X11"; } ];
1338 license = [ { fullName = "MIT/X11"; } ];
1326 };
1339 };
1327 };
1340 };
1328 repoze.lru = super.buildPythonPackage {
1341 repoze.lru = super.buildPythonPackage {
1329 name = "repoze.lru-0.6";
1342 name = "repoze.lru-0.6";
1330 buildInputs = with self; [];
1343 buildInputs = with self; [];
1331 doCheck = false;
1344 doCheck = false;
1332 propagatedBuildInputs = with self; [];
1345 propagatedBuildInputs = with self; [];
1333 src = fetchurl {
1346 src = fetchurl {
1334 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1347 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1335 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1348 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1336 };
1349 };
1337 meta = {
1350 meta = {
1338 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1351 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1339 };
1352 };
1340 };
1353 };
1341 requests = super.buildPythonPackage {
1354 requests = super.buildPythonPackage {
1342 name = "requests-2.9.1";
1355 name = "requests-2.9.1";
1343 buildInputs = with self; [];
1356 buildInputs = with self; [];
1344 doCheck = false;
1357 doCheck = false;
1345 propagatedBuildInputs = with self; [];
1358 propagatedBuildInputs = with self; [];
1346 src = fetchurl {
1359 src = fetchurl {
1347 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1360 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1348 md5 = "0b7f480d19012ec52bab78292efd976d";
1361 md5 = "0b7f480d19012ec52bab78292efd976d";
1349 };
1362 };
1350 meta = {
1363 meta = {
1351 license = [ pkgs.lib.licenses.asl20 ];
1364 license = [ pkgs.lib.licenses.asl20 ];
1352 };
1365 };
1353 };
1366 };
1354 rhodecode-enterprise-ce = super.buildPythonPackage {
1367 rhodecode-enterprise-ce = super.buildPythonPackage {
1355 name = "rhodecode-enterprise-ce-4.3.0";
1368 name = "rhodecode-enterprise-ce-4.3.0";
1356 buildInputs = with self; [WebTest configobj cssselect flake8 lxml mock pytest pytest-cov pytest-runner];
1369 buildInputs = with self; [WebTest configobj cssselect flake8 lxml mock pytest pytest-cov pytest-runner];
1357 doCheck = true;
1370 doCheck = true;
1358 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1371 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu marshmallow msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1359 src = ./.;
1372 src = ./.;
1360 meta = {
1373 meta = {
1361 license = [ { fullName = "AGPLv3, and Commercial License"; } ];
1374 license = [ { fullName = "AGPLv3, and Commercial License"; } ];
1362 };
1375 };
1363 };
1376 };
1364 rhodecode-tools = super.buildPythonPackage {
1377 rhodecode-tools = super.buildPythonPackage {
1365 name = "rhodecode-tools-0.8.3";
1378 name = "rhodecode-tools-0.8.3";
1366 buildInputs = with self; [];
1379 buildInputs = with self; [];
1367 doCheck = false;
1380 doCheck = false;
1368 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests Whoosh elasticsearch elasticsearch-dsl];
1381 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests Whoosh elasticsearch elasticsearch-dsl];
1369 src = fetchurl {
1382 src = fetchurl {
1370 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip";
1383 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip";
1371 md5 = "9acdfd71b8ddf4056057065f37ab9ccb";
1384 md5 = "9acdfd71b8ddf4056057065f37ab9ccb";
1372 };
1385 };
1373 meta = {
1386 meta = {
1374 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1387 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1375 };
1388 };
1376 };
1389 };
1377 serpent = super.buildPythonPackage {
1390 serpent = super.buildPythonPackage {
1378 name = "serpent-1.12";
1391 name = "serpent-1.12";
1379 buildInputs = with self; [];
1392 buildInputs = with self; [];
1380 doCheck = false;
1393 doCheck = false;
1381 propagatedBuildInputs = with self; [];
1394 propagatedBuildInputs = with self; [];
1382 src = fetchurl {
1395 src = fetchurl {
1383 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
1396 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
1384 md5 = "05869ac7b062828b34f8f927f0457b65";
1397 md5 = "05869ac7b062828b34f8f927f0457b65";
1385 };
1398 };
1386 meta = {
1399 meta = {
1387 license = [ pkgs.lib.licenses.mit ];
1400 license = [ pkgs.lib.licenses.mit ];
1388 };
1401 };
1389 };
1402 };
1390 setproctitle = super.buildPythonPackage {
1403 setproctitle = super.buildPythonPackage {
1391 name = "setproctitle-1.1.8";
1404 name = "setproctitle-1.1.8";
1392 buildInputs = with self; [];
1405 buildInputs = with self; [];
1393 doCheck = false;
1406 doCheck = false;
1394 propagatedBuildInputs = with self; [];
1407 propagatedBuildInputs = with self; [];
1395 src = fetchurl {
1408 src = fetchurl {
1396 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1409 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1397 md5 = "728f4c8c6031bbe56083a48594027edd";
1410 md5 = "728f4c8c6031bbe56083a48594027edd";
1398 };
1411 };
1399 meta = {
1412 meta = {
1400 license = [ pkgs.lib.licenses.bsdOriginal ];
1413 license = [ pkgs.lib.licenses.bsdOriginal ];
1401 };
1414 };
1402 };
1415 };
1403 setuptools = super.buildPythonPackage {
1416 setuptools = super.buildPythonPackage {
1404 name = "setuptools-20.8.1";
1417 name = "setuptools-20.8.1";
1405 buildInputs = with self; [];
1418 buildInputs = with self; [];
1406 doCheck = false;
1419 doCheck = false;
1407 propagatedBuildInputs = with self; [];
1420 propagatedBuildInputs = with self; [];
1408 src = fetchurl {
1421 src = fetchurl {
1409 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
1422 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
1410 md5 = "fe58a5cac0df20bb83942b252a4b0543";
1423 md5 = "fe58a5cac0df20bb83942b252a4b0543";
1411 };
1424 };
1412 meta = {
1425 meta = {
1413 license = [ pkgs.lib.licenses.mit ];
1426 license = [ pkgs.lib.licenses.mit ];
1414 };
1427 };
1415 };
1428 };
1416 setuptools-scm = super.buildPythonPackage {
1429 setuptools-scm = super.buildPythonPackage {
1417 name = "setuptools-scm-1.11.0";
1430 name = "setuptools-scm-1.11.0";
1418 buildInputs = with self; [];
1431 buildInputs = with self; [];
1419 doCheck = false;
1432 doCheck = false;
1420 propagatedBuildInputs = with self; [];
1433 propagatedBuildInputs = with self; [];
1421 src = fetchurl {
1434 src = fetchurl {
1422 url = "https://pypi.python.org/packages/cd/5f/e3a038292358058d83d764a47d09114aa5a8003ed4529518f9e580f1a94f/setuptools_scm-1.11.0.tar.gz";
1435 url = "https://pypi.python.org/packages/cd/5f/e3a038292358058d83d764a47d09114aa5a8003ed4529518f9e580f1a94f/setuptools_scm-1.11.0.tar.gz";
1423 md5 = "4c5c896ba52e134bbc3507bac6400087";
1436 md5 = "4c5c896ba52e134bbc3507bac6400087";
1424 };
1437 };
1425 meta = {
1438 meta = {
1426 license = [ pkgs.lib.licenses.mit ];
1439 license = [ pkgs.lib.licenses.mit ];
1427 };
1440 };
1428 };
1441 };
1429 simplejson = super.buildPythonPackage {
1442 simplejson = super.buildPythonPackage {
1430 name = "simplejson-3.7.2";
1443 name = "simplejson-3.7.2";
1431 buildInputs = with self; [];
1444 buildInputs = with self; [];
1432 doCheck = false;
1445 doCheck = false;
1433 propagatedBuildInputs = with self; [];
1446 propagatedBuildInputs = with self; [];
1434 src = fetchurl {
1447 src = fetchurl {
1435 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
1448 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
1436 md5 = "a5fc7d05d4cb38492285553def5d4b46";
1449 md5 = "a5fc7d05d4cb38492285553def5d4b46";
1437 };
1450 };
1438 meta = {
1451 meta = {
1439 license = [ pkgs.lib.licenses.mit pkgs.lib.licenses.afl21 ];
1452 license = [ pkgs.lib.licenses.mit pkgs.lib.licenses.afl21 ];
1440 };
1453 };
1441 };
1454 };
1442 six = super.buildPythonPackage {
1455 six = super.buildPythonPackage {
1443 name = "six-1.9.0";
1456 name = "six-1.9.0";
1444 buildInputs = with self; [];
1457 buildInputs = with self; [];
1445 doCheck = false;
1458 doCheck = false;
1446 propagatedBuildInputs = with self; [];
1459 propagatedBuildInputs = with self; [];
1447 src = fetchurl {
1460 src = fetchurl {
1448 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1461 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1449 md5 = "476881ef4012262dfc8adc645ee786c4";
1462 md5 = "476881ef4012262dfc8adc645ee786c4";
1450 };
1463 };
1451 meta = {
1464 meta = {
1452 license = [ pkgs.lib.licenses.mit ];
1465 license = [ pkgs.lib.licenses.mit ];
1453 };
1466 };
1454 };
1467 };
1455 subprocess32 = super.buildPythonPackage {
1468 subprocess32 = super.buildPythonPackage {
1456 name = "subprocess32-3.2.6";
1469 name = "subprocess32-3.2.6";
1457 buildInputs = with self; [];
1470 buildInputs = with self; [];
1458 doCheck = false;
1471 doCheck = false;
1459 propagatedBuildInputs = with self; [];
1472 propagatedBuildInputs = with self; [];
1460 src = fetchurl {
1473 src = fetchurl {
1461 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
1474 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
1462 md5 = "754c5ab9f533e764f931136974b618f1";
1475 md5 = "754c5ab9f533e764f931136974b618f1";
1463 };
1476 };
1464 meta = {
1477 meta = {
1465 license = [ pkgs.lib.licenses.psfl ];
1478 license = [ pkgs.lib.licenses.psfl ];
1466 };
1479 };
1467 };
1480 };
1468 supervisor = super.buildPythonPackage {
1481 supervisor = super.buildPythonPackage {
1469 name = "supervisor-3.3.0";
1482 name = "supervisor-3.3.0";
1470 buildInputs = with self; [];
1483 buildInputs = with self; [];
1471 doCheck = false;
1484 doCheck = false;
1472 propagatedBuildInputs = with self; [meld3];
1485 propagatedBuildInputs = with self; [meld3];
1473 src = fetchurl {
1486 src = fetchurl {
1474 url = "https://pypi.python.org/packages/44/80/d28047d120bfcc8158b4e41127706731ee6a3419c661e0a858fb0e7c4b2d/supervisor-3.3.0.tar.gz";
1487 url = "https://pypi.python.org/packages/44/80/d28047d120bfcc8158b4e41127706731ee6a3419c661e0a858fb0e7c4b2d/supervisor-3.3.0.tar.gz";
1475 md5 = "46bac00378d1eddb616752b990c67416";
1488 md5 = "46bac00378d1eddb616752b990c67416";
1476 };
1489 };
1477 meta = {
1490 meta = {
1478 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1491 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1479 };
1492 };
1480 };
1493 };
1481 transifex-client = super.buildPythonPackage {
1494 transifex-client = super.buildPythonPackage {
1482 name = "transifex-client-0.10";
1495 name = "transifex-client-0.10";
1483 buildInputs = with self; [];
1496 buildInputs = with self; [];
1484 doCheck = false;
1497 doCheck = false;
1485 propagatedBuildInputs = with self; [];
1498 propagatedBuildInputs = with self; [];
1486 src = fetchurl {
1499 src = fetchurl {
1487 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1500 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1488 md5 = "5549538d84b8eede6b254cd81ae024fa";
1501 md5 = "5549538d84b8eede6b254cd81ae024fa";
1489 };
1502 };
1490 meta = {
1503 meta = {
1491 license = [ pkgs.lib.licenses.gpl2 ];
1504 license = [ pkgs.lib.licenses.gpl2 ];
1492 };
1505 };
1493 };
1506 };
1494 translationstring = super.buildPythonPackage {
1507 translationstring = super.buildPythonPackage {
1495 name = "translationstring-1.3";
1508 name = "translationstring-1.3";
1496 buildInputs = with self; [];
1509 buildInputs = with self; [];
1497 doCheck = false;
1510 doCheck = false;
1498 propagatedBuildInputs = with self; [];
1511 propagatedBuildInputs = with self; [];
1499 src = fetchurl {
1512 src = fetchurl {
1500 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1513 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1501 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1514 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1502 };
1515 };
1503 meta = {
1516 meta = {
1504 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1517 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1505 };
1518 };
1506 };
1519 };
1507 trollius = super.buildPythonPackage {
1520 trollius = super.buildPythonPackage {
1508 name = "trollius-1.0.4";
1521 name = "trollius-1.0.4";
1509 buildInputs = with self; [];
1522 buildInputs = with self; [];
1510 doCheck = false;
1523 doCheck = false;
1511 propagatedBuildInputs = with self; [futures];
1524 propagatedBuildInputs = with self; [futures];
1512 src = fetchurl {
1525 src = fetchurl {
1513 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1526 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1514 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1527 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1515 };
1528 };
1516 meta = {
1529 meta = {
1517 license = [ pkgs.lib.licenses.asl20 ];
1530 license = [ pkgs.lib.licenses.asl20 ];
1518 };
1531 };
1519 };
1532 };
1520 uWSGI = super.buildPythonPackage {
1533 uWSGI = super.buildPythonPackage {
1521 name = "uWSGI-2.0.11.2";
1534 name = "uWSGI-2.0.11.2";
1522 buildInputs = with self; [];
1535 buildInputs = with self; [];
1523 doCheck = false;
1536 doCheck = false;
1524 propagatedBuildInputs = with self; [];
1537 propagatedBuildInputs = with self; [];
1525 src = fetchurl {
1538 src = fetchurl {
1526 url = "https://pypi.python.org/packages/9b/78/918db0cfab0546afa580c1e565209c49aaf1476bbfe491314eadbe47c556/uwsgi-2.0.11.2.tar.gz";
1539 url = "https://pypi.python.org/packages/9b/78/918db0cfab0546afa580c1e565209c49aaf1476bbfe491314eadbe47c556/uwsgi-2.0.11.2.tar.gz";
1527 md5 = "1f02dcbee7f6f61de4b1fd68350cf16f";
1540 md5 = "1f02dcbee7f6f61de4b1fd68350cf16f";
1528 };
1541 };
1529 meta = {
1542 meta = {
1530 license = [ pkgs.lib.licenses.gpl2 ];
1543 license = [ pkgs.lib.licenses.gpl2 ];
1531 };
1544 };
1532 };
1545 };
1533 urllib3 = super.buildPythonPackage {
1546 urllib3 = super.buildPythonPackage {
1534 name = "urllib3-1.16";
1547 name = "urllib3-1.16";
1535 buildInputs = with self; [];
1548 buildInputs = with self; [];
1536 doCheck = false;
1549 doCheck = false;
1537 propagatedBuildInputs = with self; [];
1550 propagatedBuildInputs = with self; [];
1538 src = fetchurl {
1551 src = fetchurl {
1539 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1552 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1540 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1553 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1541 };
1554 };
1542 meta = {
1555 meta = {
1543 license = [ pkgs.lib.licenses.mit ];
1556 license = [ pkgs.lib.licenses.mit ];
1544 };
1557 };
1545 };
1558 };
1546 venusian = super.buildPythonPackage {
1559 venusian = super.buildPythonPackage {
1547 name = "venusian-1.0";
1560 name = "venusian-1.0";
1548 buildInputs = with self; [];
1561 buildInputs = with self; [];
1549 doCheck = false;
1562 doCheck = false;
1550 propagatedBuildInputs = with self; [];
1563 propagatedBuildInputs = with self; [];
1551 src = fetchurl {
1564 src = fetchurl {
1552 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
1565 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
1553 md5 = "dccf2eafb7113759d60c86faf5538756";
1566 md5 = "dccf2eafb7113759d60c86faf5538756";
1554 };
1567 };
1555 meta = {
1568 meta = {
1556 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1569 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1557 };
1570 };
1558 };
1571 };
1559 waitress = super.buildPythonPackage {
1572 waitress = super.buildPythonPackage {
1560 name = "waitress-0.8.9";
1573 name = "waitress-0.8.9";
1561 buildInputs = with self; [];
1574 buildInputs = with self; [];
1562 doCheck = false;
1575 doCheck = false;
1563 propagatedBuildInputs = with self; [setuptools];
1576 propagatedBuildInputs = with self; [setuptools];
1564 src = fetchurl {
1577 src = fetchurl {
1565 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
1578 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
1566 md5 = "da3f2e62b3676be5dd630703a68e2a04";
1579 md5 = "da3f2e62b3676be5dd630703a68e2a04";
1567 };
1580 };
1568 meta = {
1581 meta = {
1569 license = [ pkgs.lib.licenses.zpt21 ];
1582 license = [ pkgs.lib.licenses.zpt21 ];
1570 };
1583 };
1571 };
1584 };
1572 wsgiref = super.buildPythonPackage {
1585 wsgiref = super.buildPythonPackage {
1573 name = "wsgiref-0.1.2";
1586 name = "wsgiref-0.1.2";
1574 buildInputs = with self; [];
1587 buildInputs = with self; [];
1575 doCheck = false;
1588 doCheck = false;
1576 propagatedBuildInputs = with self; [];
1589 propagatedBuildInputs = with self; [];
1577 src = fetchurl {
1590 src = fetchurl {
1578 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1591 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1579 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1592 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1580 };
1593 };
1581 meta = {
1594 meta = {
1582 license = [ { fullName = "PSF or ZPL"; } ];
1595 license = [ { fullName = "PSF or ZPL"; } ];
1583 };
1596 };
1584 };
1597 };
1585 zope.cachedescriptors = super.buildPythonPackage {
1598 zope.cachedescriptors = super.buildPythonPackage {
1586 name = "zope.cachedescriptors-4.0.0";
1599 name = "zope.cachedescriptors-4.0.0";
1587 buildInputs = with self; [];
1600 buildInputs = with self; [];
1588 doCheck = false;
1601 doCheck = false;
1589 propagatedBuildInputs = with self; [setuptools];
1602 propagatedBuildInputs = with self; [setuptools];
1590 src = fetchurl {
1603 src = fetchurl {
1591 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1604 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1592 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1605 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1593 };
1606 };
1594 meta = {
1607 meta = {
1595 license = [ pkgs.lib.licenses.zpt21 ];
1608 license = [ pkgs.lib.licenses.zpt21 ];
1596 };
1609 };
1597 };
1610 };
1598 zope.deprecation = super.buildPythonPackage {
1611 zope.deprecation = super.buildPythonPackage {
1599 name = "zope.deprecation-4.1.2";
1612 name = "zope.deprecation-4.1.2";
1600 buildInputs = with self; [];
1613 buildInputs = with self; [];
1601 doCheck = false;
1614 doCheck = false;
1602 propagatedBuildInputs = with self; [setuptools];
1615 propagatedBuildInputs = with self; [setuptools];
1603 src = fetchurl {
1616 src = fetchurl {
1604 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1617 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1605 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1618 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1606 };
1619 };
1607 meta = {
1620 meta = {
1608 license = [ pkgs.lib.licenses.zpt21 ];
1621 license = [ pkgs.lib.licenses.zpt21 ];
1609 };
1622 };
1610 };
1623 };
1611 zope.event = super.buildPythonPackage {
1624 zope.event = super.buildPythonPackage {
1612 name = "zope.event-4.0.3";
1625 name = "zope.event-4.0.3";
1613 buildInputs = with self; [];
1626 buildInputs = with self; [];
1614 doCheck = false;
1627 doCheck = false;
1615 propagatedBuildInputs = with self; [setuptools];
1628 propagatedBuildInputs = with self; [setuptools];
1616 src = fetchurl {
1629 src = fetchurl {
1617 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
1630 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
1618 md5 = "9a3780916332b18b8b85f522bcc3e249";
1631 md5 = "9a3780916332b18b8b85f522bcc3e249";
1619 };
1632 };
1620 meta = {
1633 meta = {
1621 license = [ pkgs.lib.licenses.zpt21 ];
1634 license = [ pkgs.lib.licenses.zpt21 ];
1622 };
1635 };
1623 };
1636 };
1624 zope.interface = super.buildPythonPackage {
1637 zope.interface = super.buildPythonPackage {
1625 name = "zope.interface-4.1.3";
1638 name = "zope.interface-4.1.3";
1626 buildInputs = with self; [];
1639 buildInputs = with self; [];
1627 doCheck = false;
1640 doCheck = false;
1628 propagatedBuildInputs = with self; [setuptools];
1641 propagatedBuildInputs = with self; [setuptools];
1629 src = fetchurl {
1642 src = fetchurl {
1630 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
1643 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
1631 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
1644 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
1632 };
1645 };
1633 meta = {
1646 meta = {
1634 license = [ pkgs.lib.licenses.zpt21 ];
1647 license = [ pkgs.lib.licenses.zpt21 ];
1635 };
1648 };
1636 };
1649 };
1637
1650
1638 ### Test requirements
1651 ### Test requirements
1639
1652
1640
1653
1641 }
1654 }
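
Each generated entry above pins an exact source tarball by URL and md5 checksum. A minimal sketch of the check such a pin implies — reusing the subprocess32 values shown above, and assuming network access plus the requests library pinned further down in the requirements — would be:

import hashlib

import requests

# Example values copied from the subprocess32 entry above.
SDIST_URL = ("https://pypi.python.org/packages/28/8d/"
             "33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/"
             "subprocess32-3.2.6.tar.gz")
EXPECTED_MD5 = "754c5ab9f533e764f931136974b618f1"


def sdist_matches_pin(url, expected_md5):
    """Download the source tarball and compare its md5 with the pinned value."""
    payload = requests.get(url, timeout=60).content
    return hashlib.md5(payload).hexdigest() == expected_md5


if __name__ == "__main__":
    print(sdist_matches_pin(SDIST_URL, EXPECTED_MD5))
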
@@ -1,151 +1,152 b''
1 Babel==1.3
1 Babel==1.3
2 Beaker==1.7.0
2 Beaker==1.7.0
3 CProfileV==1.0.6
3 CProfileV==1.0.6
4 Fabric==1.10.0
4 Fabric==1.10.0
5 FormEncode==1.2.4
5 FormEncode==1.2.4
6 Jinja2==2.7.3
6 Jinja2==2.7.3
7 Mako==1.0.1
7 Mako==1.0.1
8 Markdown==2.6.2
8 Markdown==2.6.2
9 MarkupSafe==0.23
9 MarkupSafe==0.23
10 MySQL-python==1.2.5
10 MySQL-python==1.2.5
11 Paste==2.0.2
11 Paste==2.0.2
12 PasteDeploy==1.5.2
12 PasteDeploy==1.5.2
13 PasteScript==1.7.5
13 PasteScript==1.7.5
14 Pygments==2.1.3
14 Pygments==2.1.3
15
15
16 # TODO: This version is not available on PyPI
16 # TODO: This version is not available on PyPI
17 # Pylons==1.0.2.dev20160108
17 # Pylons==1.0.2.dev20160108
18 Pylons==1.0.1
18 Pylons==1.0.1
19
19
20 # TODO: This version is not available, but newer ones are
20 # TODO: This version is not available, but newer ones are
21 # Pyro4==4.35
21 # Pyro4==4.35
22 Pyro4==4.41
22 Pyro4==4.41
23
23
24 # TODO: This should probably not be in here
24 # TODO: This should probably not be in here
25 # -e hg+https://johbo@code.rhodecode.com/johbo/rhodecode-fork@3a454bd1f17c0b2b2a951cf2b111e0320d7942a9#egg=RhodeCodeEnterprise-dev
25 # -e hg+https://johbo@code.rhodecode.com/johbo/rhodecode-fork@3a454bd1f17c0b2b2a951cf2b111e0320d7942a9#egg=RhodeCodeEnterprise-dev
26
26
27 # TODO: This is not really a dependency, we should add it only
27 # TODO: This is not really a dependency, we should add it only
28 # to the development environment, where it is useful.
28 # to the development environment, where it is useful.
29 # RhodeCodeVCSServer==3.9.0
29 # RhodeCodeVCSServer==3.9.0
30
30
31 Routes==1.13
31 Routes==1.13
32 SQLAlchemy==0.9.9
32 SQLAlchemy==0.9.9
33 Sphinx==1.2.2
33 Sphinx==1.2.2
34 Tempita==0.5.2
34 Tempita==0.5.2
35 URLObject==2.4.0
35 URLObject==2.4.0
36 WebError==0.10.3
36 WebError==0.10.3
37
37
38 # TODO: This is modified by us, needs a better integration. For now
38 # TODO: This is modified by us, needs a better integration. For now
39 # using the latest version released before it.
39 # using the latest version released before it.
40 # WebHelpers==1.3.dev20150807
40 # WebHelpers==1.3.dev20150807
41 WebHelpers==1.3
41 WebHelpers==1.3
42
42
43 WebHelpers2==2.0
43 WebHelpers2==2.0
44 WebOb==1.3.1
44 WebOb==1.3.1
45 WebTest==1.4.3
45 WebTest==1.4.3
46 Whoosh==2.7.0
46 Whoosh==2.7.0
47 alembic==0.8.4
47 alembic==0.8.4
48 amqplib==1.0.2
48 amqplib==1.0.2
49 anyjson==0.3.3
49 anyjson==0.3.3
50 appenlight-client==0.6.14
50 appenlight-client==0.6.14
51 authomatic==0.1.0.post1;
51 authomatic==0.1.0.post1;
52 backport-ipaddress==0.1
52 backport-ipaddress==0.1
53 bottle==0.12.8
53 bottle==0.12.8
54 bumpversion==0.5.3
54 bumpversion==0.5.3
55 celery==2.2.10
55 celery==2.2.10
56 click==5.1
56 click==5.1
57 colander==1.2
57 colander==1.2
58 configobj==5.0.6
58 configobj==5.0.6
59 cov-core==1.15.0
59 cov-core==1.15.0
60 coverage==3.7.1
60 coverage==3.7.1
61 cssselect==0.9.1
61 cssselect==0.9.1
62 decorator==3.4.2
62 decorator==3.4.2
63 docutils==0.12
63 docutils==0.12
64 dogpile.cache==0.6.1
64 dogpile.cache==0.6.1
65 dogpile.core==0.4.1
65 dogpile.core==0.4.1
66 dulwich==0.12.0
66 dulwich==0.12.0
67 ecdsa==0.11
67 ecdsa==0.11
68 flake8==2.4.1
68 flake8==2.4.1
69 future==0.14.3
69 future==0.14.3
70 futures==3.0.2
70 futures==3.0.2
71 gprof2dot==2015.12.1
71 gprof2dot==2015.12.1
72 gunicorn==19.6.0
72 gunicorn==19.6.0
73
73
74 # TODO: Needs subvertpy and blows up without Subversion headers,
74 # TODO: Needs subvertpy and blows up without Subversion headers,
75 # actually we should not need this for Enterprise at all.
75 # actually we should not need this for Enterprise at all.
76 # hgsubversion==1.8.2
76 # hgsubversion==1.8.2
77
77
78 gnureadline==6.3.3
78 gnureadline==6.3.3
79 infrae.cache==1.0.1
79 infrae.cache==1.0.1
80 invoke==0.13.0
80 invoke==0.13.0
81 ipdb==0.8
81 ipdb==0.8
82 ipython==3.1.0
82 ipython==3.1.0
83 iso8601==0.1.11
83 iso8601==0.1.11
84 itsdangerous==0.24
84 itsdangerous==0.24
85 kombu==1.5.1
85 kombu==1.5.1
86 lxml==3.4.4
86 lxml==3.4.4
87 marshmallow==2.8.0
87 mccabe==0.3
88 mccabe==0.3
88 meld3==1.0.2
89 meld3==1.0.2
89 mock==1.0.1
90 mock==1.0.1
90 msgpack-python==0.4.6
91 msgpack-python==0.4.6
91 nose==1.3.6
92 nose==1.3.6
92 objgraph==2.0.0
93 objgraph==2.0.0
93 packaging==15.2
94 packaging==15.2
94 paramiko==1.15.1
95 paramiko==1.15.1
95 pep8==1.5.7
96 pep8==1.5.7
96 psutil==2.2.1
97 psutil==2.2.1
97 psycopg2==2.6.1
98 psycopg2==2.6.1
98 py==1.4.29
99 py==1.4.29
99 py-bcrypt==0.4
100 py-bcrypt==0.4
100 py-gfm==0.1.3
101 py-gfm==0.1.3
101 pycrypto==2.6.1
102 pycrypto==2.6.1
102 pycurl==7.19.5
103 pycurl==7.19.5
103 pyflakes==0.8.1
104 pyflakes==0.8.1
104 pyparsing==1.5.7
105 pyparsing==1.5.7
105 pyramid==1.6.1
106 pyramid==1.6.1
106 pyramid-beaker==0.8
107 pyramid-beaker==0.8
107 pyramid-debugtoolbar==2.4.2
108 pyramid-debugtoolbar==2.4.2
108 pyramid-jinja2==2.5
109 pyramid-jinja2==2.5
109 pyramid-mako==1.0.2
110 pyramid-mako==1.0.2
110 pysqlite==2.6.3
111 pysqlite==2.6.3
111 pytest==2.8.5
112 pytest==2.8.5
112 pytest-runner==2.7.1
113 pytest-runner==2.7.1
113 pytest-catchlog==1.2.2
114 pytest-catchlog==1.2.2
114 pytest-cov==1.8.1
115 pytest-cov==1.8.1
115 pytest-profiling==1.0.1
116 pytest-profiling==1.0.1
116 pytest-timeout==0.4
117 pytest-timeout==0.4
117 python-dateutil==1.5
118 python-dateutil==1.5
118 python-ldap==2.4.19
119 python-ldap==2.4.19
119 python-memcached==1.57
120 python-memcached==1.57
120 python-pam==1.8.2
121 python-pam==1.8.2
121 pytz==2015.4
122 pytz==2015.4
122 pyzmq==14.6.0
123 pyzmq==14.6.0
123
124
124 # TODO: This is not available in public
125 # TODO: This is not available in public
125 # rc-testdata==0.2.0
126 # rc-testdata==0.2.0
126
127
127 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip#md5=9acdfd71b8ddf4056057065f37ab9ccb
128 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip#md5=9acdfd71b8ddf4056057065f37ab9ccb
128
129
129
130
130 recaptcha-client==1.0.6
131 recaptcha-client==1.0.6
131 repoze.lru==0.6
132 repoze.lru==0.6
132 requests==2.9.1
133 requests==2.9.1
133 serpent==1.12
134 serpent==1.12
134 setproctitle==1.1.8
135 setproctitle==1.1.8
135 setuptools==20.8.1
136 setuptools==20.8.1
136 setuptools-scm==1.11.0
137 setuptools-scm==1.11.0
137 simplejson==3.7.2
138 simplejson==3.7.2
138 six==1.9.0
139 six==1.9.0
139 subprocess32==3.2.6
140 subprocess32==3.2.6
140 supervisor==3.3.0
141 supervisor==3.3.0
141 transifex-client==0.10
142 transifex-client==0.10
142 translationstring==1.3
143 translationstring==1.3
143 trollius==1.0.4
144 trollius==1.0.4
144 uWSGI==2.0.11.2
145 uWSGI==2.0.11.2
145 venusian==1.0
146 venusian==1.0
146 waitress==0.8.9
147 waitress==0.8.9
147 wsgiref==0.1.2
148 wsgiref==0.1.2
148 zope.cachedescriptors==4.0.0
149 zope.cachedescriptors==4.0.0
149 zope.deprecation==4.1.2
150 zope.deprecation==4.1.2
150 zope.event==4.0.3
151 zope.event==4.0.3
151 zope.interface==4.1.3
152 zope.interface==4.1.3
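
The newly pinned marshmallow==2.8.0 above provides the schema-based dump API used for event serialization. A minimal, hypothetical sketch of that 2.x API (PackageSchema and Package are illustrative names only, not part of the codebase):

from marshmallow import Schema, fields


class PackageSchema(Schema):
    name = fields.Str()
    version = fields.Str()


class Package(object):
    def __init__(self, name, version):
        self.name = name
        self.version = version


# In marshmallow 2.x, dump() returns a MarshalResult; .data holds the plain dict.
result = PackageSchema().dump(Package('marshmallow', '2.8.0'))
print(result.data)  # {'name': 'marshmallow', 'version': '2.8.0'}
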
@@ -1,158 +1,163 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24 from rhodecode.model.repo import RepoModel
24 from rhodecode.model.repo import RepoModel
25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
26 from rhodecode.api.tests.utils import (
26 from rhodecode.api.tests.utils import (
27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
28 from rhodecode.tests.fixture import Fixture
28 from rhodecode.tests.fixture import Fixture
29
29
30
30
31 fixture = Fixture()
31 fixture = Fixture()
32
32
33 UPDATE_REPO_NAME = 'api_update_me'
33 UPDATE_REPO_NAME = 'api_update_me'
34
34
35 class SAME_AS_UPDATES(object): """ Constant used for tests below """
35 class SAME_AS_UPDATES(object): """ Constant used for tests below """
36
36
37 @pytest.mark.usefixtures("testuser_api", "app")
37 @pytest.mark.usefixtures("testuser_api", "app")
38 class TestApiUpdateRepo(object):
38 class TestApiUpdateRepo(object):
39
39
40 @pytest.mark.parametrize("updates, expected", [
40 @pytest.mark.parametrize("updates, expected", [
41 ({'owner': TEST_USER_REGULAR_LOGIN}, SAME_AS_UPDATES),
41 ({'owner': TEST_USER_REGULAR_LOGIN}, SAME_AS_UPDATES),
42 ({'description': 'new description'}, SAME_AS_UPDATES),
42 ({'description': 'new description'}, SAME_AS_UPDATES),
43 ({'clone_uri': 'http://foo.com/repo'}, SAME_AS_UPDATES),
43 ({'clone_uri': 'http://foo.com/repo'}, SAME_AS_UPDATES),
44 ({'clone_uri': None}, {'clone_uri': ''}),
44 ({'clone_uri': None}, {'clone_uri': ''}),
45 ({'clone_uri': ''}, {'clone_uri': ''}),
45 ({'clone_uri': ''}, {'clone_uri': ''}),
46 ({'landing_rev': 'branch:master'}, {'landing_rev': ['branch','master']}),
46 ({'landing_rev': 'branch:master'}, {'landing_rev': ['branch','master']}),
47 ({'enable_statistics': True}, SAME_AS_UPDATES),
47 ({'enable_statistics': True}, SAME_AS_UPDATES),
48 ({'enable_locking': True}, SAME_AS_UPDATES),
48 ({'enable_locking': True}, SAME_AS_UPDATES),
49 ({'enable_downloads': True}, SAME_AS_UPDATES),
49 ({'enable_downloads': True}, SAME_AS_UPDATES),
50 ({'name': 'new_repo_name'}, {'repo_name': 'new_repo_name'}),
50 ({'name': 'new_repo_name'}, {
51 ({'group': 'test_group_for_update'},
51 'repo_name': 'new_repo_name',
52 {'repo_name': 'test_group_for_update/%s' % UPDATE_REPO_NAME}),
52 'url': 'http://test.example.com:80/new_repo_name',
53 }),
54 ({'group': 'test_group_for_update'}, {
55 'repo_name': 'test_group_for_update/%s' % UPDATE_REPO_NAME,
56 'url': 'http://test.example.com:80/test_group_for_update/%s' % UPDATE_REPO_NAME
57 }),
53 ])
58 ])
54 def test_api_update_repo(self, updates, expected, backend):
59 def test_api_update_repo(self, updates, expected, backend):
55 repo_name = UPDATE_REPO_NAME
60 repo_name = UPDATE_REPO_NAME
56 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
61 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
57 if updates.get('group'):
62 if updates.get('group'):
58 fixture.create_repo_group(updates['group'])
63 fixture.create_repo_group(updates['group'])
59
64
60 expected_api_data = repo.get_api_data(include_secrets=True)
65 expected_api_data = repo.get_api_data(include_secrets=True)
61 if expected is SAME_AS_UPDATES:
66 if expected is SAME_AS_UPDATES:
62 expected_api_data.update(updates)
67 expected_api_data.update(updates)
63 else:
68 else:
64 expected_api_data.update(expected)
69 expected_api_data.update(expected)
65
70
66
71
67 id_, params = build_data(
72 id_, params = build_data(
68 self.apikey, 'update_repo', repoid=repo_name, **updates)
73 self.apikey, 'update_repo', repoid=repo_name, **updates)
69 response = api_call(self.app, params)
74 response = api_call(self.app, params)
70
75
71 if updates.get('name'):
76 if updates.get('name'):
72 repo_name = updates['name']
77 repo_name = updates['name']
73 if updates.get('group'):
78 if updates.get('group'):
74 repo_name = '/'.join([updates['group'], repo_name])
79 repo_name = '/'.join([updates['group'], repo_name])
75
80
76 try:
81 try:
77 expected = {
82 expected = {
78 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
83 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
79 'repository': jsonify(expected_api_data)
84 'repository': jsonify(expected_api_data)
80 }
85 }
81 assert_ok(id_, expected, given=response.body)
86 assert_ok(id_, expected, given=response.body)
82 finally:
87 finally:
83 fixture.destroy_repo(repo_name)
88 fixture.destroy_repo(repo_name)
84 if updates.get('group'):
89 if updates.get('group'):
85 fixture.destroy_repo_group(updates['group'])
90 fixture.destroy_repo_group(updates['group'])
86
91
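
SAME_AS_UPDATES above acts purely as a sentinel: when a parametrized case passes it as `expected`, the expected API payload is the submitted updates themselves. A distilled, standalone sketch of that merge logic (build_expected is an illustrative helper, not a project function):

class SAME_AS_UPDATES(object):
    """Sentinel object: expected values mirror the submitted updates."""


def build_expected(base_data, updates, expected):
    # Identity check against the sentinel, exactly as the test above does.
    data = dict(base_data)
    data.update(updates if expected is SAME_AS_UPDATES else expected)
    return data


assert build_expected({'a': 1}, {'a': 2}, SAME_AS_UPDATES) == {'a': 2}
assert build_expected({'a': 1}, {'a': 2}, {'a': 3}) == {'a': 3}
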
87 def test_api_update_repo_fork_of_field(self, backend):
92 def test_api_update_repo_fork_of_field(self, backend):
88 master_repo = backend.create_repo()
93 master_repo = backend.create_repo()
89 repo = backend.create_repo()
94 repo = backend.create_repo()
90 updates = {
95 updates = {
91 'fork_of': master_repo.repo_name
96 'fork_of': master_repo.repo_name
92 }
97 }
93 expected_api_data = repo.get_api_data(include_secrets=True)
98 expected_api_data = repo.get_api_data(include_secrets=True)
94 expected_api_data.update(updates)
99 expected_api_data.update(updates)
95
100
96 id_, params = build_data(
101 id_, params = build_data(
97 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
102 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
98 response = api_call(self.app, params)
103 response = api_call(self.app, params)
99 expected = {
104 expected = {
100 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
105 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
101 'repository': jsonify(expected_api_data)
106 'repository': jsonify(expected_api_data)
102 }
107 }
103 assert_ok(id_, expected, given=response.body)
108 assert_ok(id_, expected, given=response.body)
104 result = response.json['result']['repository']
109 result = response.json['result']['repository']
105 assert result['fork_of'] == master_repo.repo_name
110 assert result['fork_of'] == master_repo.repo_name
106
111
107 def test_api_update_repo_fork_of_not_found(self, backend):
112 def test_api_update_repo_fork_of_not_found(self, backend):
108 master_repo_name = 'fake-parent-repo'
113 master_repo_name = 'fake-parent-repo'
109 repo = backend.create_repo()
114 repo = backend.create_repo()
110 updates = {
115 updates = {
111 'fork_of': master_repo_name
116 'fork_of': master_repo_name
112 }
117 }
113 id_, params = build_data(
118 id_, params = build_data(
114 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
119 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
115 response = api_call(self.app, params)
120 response = api_call(self.app, params)
116 expected = 'repository `{}` does not exist'.format(master_repo_name)
121 expected = 'repository `{}` does not exist'.format(master_repo_name)
117 assert_error(id_, expected, given=response.body)
122 assert_error(id_, expected, given=response.body)
118
123
119 def test_api_update_repo_with_repo_group_not_existing(self):
124 def test_api_update_repo_with_repo_group_not_existing(self):
120 repo_name = 'admin_owned'
125 repo_name = 'admin_owned'
121 fixture.create_repo(repo_name)
126 fixture.create_repo(repo_name)
122 updates = {'group': 'test_group_for_update'}
127 updates = {'group': 'test_group_for_update'}
123 id_, params = build_data(
128 id_, params = build_data(
124 self.apikey, 'update_repo', repoid=repo_name, **updates)
129 self.apikey, 'update_repo', repoid=repo_name, **updates)
125 response = api_call(self.app, params)
130 response = api_call(self.app, params)
126 try:
131 try:
127 expected = 'repository group `%s` does not exist' % (
132 expected = 'repository group `%s` does not exist' % (
128 updates['group'],)
133 updates['group'],)
129 assert_error(id_, expected, given=response.body)
134 assert_error(id_, expected, given=response.body)
130 finally:
135 finally:
131 fixture.destroy_repo(repo_name)
136 fixture.destroy_repo(repo_name)
132
137
133 def test_api_update_repo_regular_user_not_allowed(self):
138 def test_api_update_repo_regular_user_not_allowed(self):
134 repo_name = 'admin_owned'
139 repo_name = 'admin_owned'
135 fixture.create_repo(repo_name)
140 fixture.create_repo(repo_name)
136 updates = {'active': False}
141 updates = {'active': False}
137 id_, params = build_data(
142 id_, params = build_data(
138 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
143 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
139 response = api_call(self.app, params)
144 response = api_call(self.app, params)
140 try:
145 try:
141 expected = 'repository `%s` does not exist' % (repo_name,)
146 expected = 'repository `%s` does not exist' % (repo_name,)
142 assert_error(id_, expected, given=response.body)
147 assert_error(id_, expected, given=response.body)
143 finally:
148 finally:
144 fixture.destroy_repo(repo_name)
149 fixture.destroy_repo(repo_name)
145
150
146 @mock.patch.object(RepoModel, 'update', crash)
151 @mock.patch.object(RepoModel, 'update', crash)
147 def test_api_update_repo_exception_occurred(self, backend):
152 def test_api_update_repo_exception_occurred(self, backend):
148 repo_name = UPDATE_REPO_NAME
153 repo_name = UPDATE_REPO_NAME
149 fixture.create_repo(repo_name, repo_type=backend.alias)
154 fixture.create_repo(repo_name, repo_type=backend.alias)
150 id_, params = build_data(
155 id_, params = build_data(
151 self.apikey, 'update_repo', repoid=repo_name,
156 self.apikey, 'update_repo', repoid=repo_name,
152 owner=TEST_USER_ADMIN_LOGIN,)
157 owner=TEST_USER_ADMIN_LOGIN,)
153 response = api_call(self.app, params)
158 response = api_call(self.app, params)
154 try:
159 try:
155 expected = 'failed to update repo `%s`' % (repo_name,)
160 expected = 'failed to update repo `%s`' % (repo_name,)
156 assert_error(id_, expected, given=response.body)
161 assert_error(id_, expected, given=response.body)
157 finally:
162 finally:
158 fixture.destroy_repo(repo_name)
163 fixture.destroy_repo(repo_name)
@@ -1,846 +1,847 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Pull requests controller for RhodeCode, used for initializing pull requests
22 Pull requests controller for RhodeCode, used for initializing pull requests
23 """
23 """
24
24
25 import formencode
25 import formencode
26 import logging
26 import logging
27
27
28 from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest
28 from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest
29 from pylons import request, tmpl_context as c, url
29 from pylons import request, tmpl_context as c, url
30 from pylons.controllers.util import redirect
30 from pylons.controllers.util import redirect
31 from pylons.i18n.translation import _
31 from pylons.i18n.translation import _
32 from sqlalchemy.sql import func
32 from sqlalchemy.sql import func
33 from sqlalchemy.sql.expression import or_
33 from sqlalchemy.sql.expression import or_
34
34
35 from rhodecode.lib import auth, diffs, helpers as h
35 from rhodecode.lib import auth, diffs, helpers as h
36 from rhodecode.lib.ext_json import json
36 from rhodecode.lib.ext_json import json
37 from rhodecode.lib.base import (
37 from rhodecode.lib.base import (
38 BaseRepoController, render, vcs_operation_context)
38 BaseRepoController, render, vcs_operation_context)
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
40 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
41 HasAcceptedRepoType, XHRRequired)
41 HasAcceptedRepoType, XHRRequired)
42 from rhodecode.lib.utils import jsonify
42 from rhodecode.lib.utils import jsonify
43 from rhodecode.lib.utils2 import safe_int, safe_str, str2bool, safe_unicode
43 from rhodecode.lib.utils2 import safe_int, safe_str, str2bool, safe_unicode
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError)
46 EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError)
47 from rhodecode.lib.diffs import LimitedDiffContainer
47 from rhodecode.lib.diffs import LimitedDiffContainer
48 from rhodecode.model.changeset_status import ChangesetStatusModel
48 from rhodecode.model.changeset_status import ChangesetStatusModel
49 from rhodecode.model.comment import ChangesetCommentsModel
49 from rhodecode.model.comment import ChangesetCommentsModel
50 from rhodecode.model.db import PullRequest, ChangesetStatus, ChangesetComment, \
50 from rhodecode.model.db import PullRequest, ChangesetStatus, ChangesetComment, \
51 Repository
51 Repository
52 from rhodecode.model.forms import PullRequestForm
52 from rhodecode.model.forms import PullRequestForm
53 from rhodecode.model.meta import Session
53 from rhodecode.model.meta import Session
54 from rhodecode.model.pull_request import PullRequestModel
54 from rhodecode.model.pull_request import PullRequestModel
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
59 class PullrequestsController(BaseRepoController):
59 class PullrequestsController(BaseRepoController):
60 def __before__(self):
60 def __before__(self):
61 super(PullrequestsController, self).__before__()
61 super(PullrequestsController, self).__before__()
62
62
63 def _load_compare_data(self, pull_request, enable_comments=True):
63 def _load_compare_data(self, pull_request, enable_comments=True):
64 """
64 """
65 Load context data needed for generating compare diff
65 Load context data needed for generating compare diff
66
66
67 :param pull_request: object related to the request
67 :param pull_request: object related to the request
68 :param enable_comments: flag to determine if comments are included
68 :param enable_comments: flag to determine if comments are included
69 """
69 """
70 source_repo = pull_request.source_repo
70 source_repo = pull_request.source_repo
71 source_ref_id = pull_request.source_ref_parts.commit_id
71 source_ref_id = pull_request.source_ref_parts.commit_id
72
72
73 target_repo = pull_request.target_repo
73 target_repo = pull_request.target_repo
74 target_ref_id = pull_request.target_ref_parts.commit_id
74 target_ref_id = pull_request.target_ref_parts.commit_id
75
75
76 # even though commits can be opened via bookmarks/branches/tags, we always
76 # even though commits can be opened via bookmarks/branches/tags, we always
77 # convert these to a rev to prevent changes after a bookmark or branch change
77 # convert these to a rev to prevent changes after a bookmark or branch change
78 c.source_ref_type = 'rev'
78 c.source_ref_type = 'rev'
79 c.source_ref = source_ref_id
79 c.source_ref = source_ref_id
80
80
81 c.target_ref_type = 'rev'
81 c.target_ref_type = 'rev'
82 c.target_ref = target_ref_id
82 c.target_ref = target_ref_id
83
83
84 c.source_repo = source_repo
84 c.source_repo = source_repo
85 c.target_repo = target_repo
85 c.target_repo = target_repo
86
86
87 c.fulldiff = bool(request.GET.get('fulldiff'))
87 c.fulldiff = bool(request.GET.get('fulldiff'))
88
88
89 # diff_limit is the old behavior: it will cut off the whole diff
89 # diff_limit is the old behavior: it will cut off the whole diff
90 # if the limit is applied; otherwise it will just hide the
90 # if the limit is applied; otherwise it will just hide the
91 # big files from the front-end
91 # big files from the front-end
92 diff_limit = self.cut_off_limit_diff
92 diff_limit = self.cut_off_limit_diff
93 file_limit = self.cut_off_limit_file
93 file_limit = self.cut_off_limit_file
94
94
95 pre_load = ["author", "branch", "date", "message"]
95 pre_load = ["author", "branch", "date", "message"]
96
96
97 c.commit_ranges = []
97 c.commit_ranges = []
98 source_commit = EmptyCommit()
98 source_commit = EmptyCommit()
99 target_commit = EmptyCommit()
99 target_commit = EmptyCommit()
100 c.missing_requirements = False
100 c.missing_requirements = False
101 try:
101 try:
102 c.commit_ranges = [
102 c.commit_ranges = [
103 source_repo.get_commit(commit_id=rev, pre_load=pre_load)
103 source_repo.get_commit(commit_id=rev, pre_load=pre_load)
104 for rev in pull_request.revisions]
104 for rev in pull_request.revisions]
105
105
106 c.statuses = source_repo.statuses(
106 c.statuses = source_repo.statuses(
107 [x.raw_id for x in c.commit_ranges])
107 [x.raw_id for x in c.commit_ranges])
108
108
109 target_commit = source_repo.get_commit(
109 target_commit = source_repo.get_commit(
110 commit_id=safe_str(target_ref_id))
110 commit_id=safe_str(target_ref_id))
111 source_commit = source_repo.get_commit(
111 source_commit = source_repo.get_commit(
112 commit_id=safe_str(source_ref_id))
112 commit_id=safe_str(source_ref_id))
113 except RepositoryRequirementError:
113 except RepositoryRequirementError:
114 c.missing_requirements = True
114 c.missing_requirements = True
115
115
116 c.missing_commits = False
116 c.missing_commits = False
117 if (c.missing_requirements or
117 if (c.missing_requirements or
118 isinstance(source_commit, EmptyCommit) or
118 isinstance(source_commit, EmptyCommit) or
119 source_commit == target_commit):
119 source_commit == target_commit):
120 _parsed = []
120 _parsed = []
121 c.missing_commits = True
121 c.missing_commits = True
122 else:
122 else:
123 vcs_diff = PullRequestModel().get_diff(pull_request)
123 vcs_diff = PullRequestModel().get_diff(pull_request)
124 diff_processor = diffs.DiffProcessor(
124 diff_processor = diffs.DiffProcessor(
125 vcs_diff, format='gitdiff', diff_limit=diff_limit,
125 vcs_diff, format='gitdiff', diff_limit=diff_limit,
126 file_limit=file_limit, show_full_diff=c.fulldiff)
126 file_limit=file_limit, show_full_diff=c.fulldiff)
127 _parsed = diff_processor.prepare()
127 _parsed = diff_processor.prepare()
128
128
129 c.limited_diff = isinstance(_parsed, LimitedDiffContainer)
129 c.limited_diff = isinstance(_parsed, LimitedDiffContainer)
130
130
131 c.files = []
131 c.files = []
132 c.changes = {}
132 c.changes = {}
133 c.lines_added = 0
133 c.lines_added = 0
134 c.lines_deleted = 0
134 c.lines_deleted = 0
135 c.included_files = []
135 c.included_files = []
136 c.deleted_files = []
136 c.deleted_files = []
137
137
138 for f in _parsed:
138 for f in _parsed:
139 st = f['stats']
139 st = f['stats']
140 c.lines_added += st['added']
140 c.lines_added += st['added']
141 c.lines_deleted += st['deleted']
141 c.lines_deleted += st['deleted']
142
142
143 fid = h.FID('', f['filename'])
143 fid = h.FID('', f['filename'])
144 c.files.append([fid, f['operation'], f['filename'], f['stats']])
144 c.files.append([fid, f['operation'], f['filename'], f['stats']])
145 c.included_files.append(f['filename'])
145 c.included_files.append(f['filename'])
146 html_diff = diff_processor.as_html(enable_comments=enable_comments,
146 html_diff = diff_processor.as_html(enable_comments=enable_comments,
147 parsed_lines=[f])
147 parsed_lines=[f])
148 c.changes[fid] = [f['operation'], f['filename'], html_diff, f]
148 c.changes[fid] = [f['operation'], f['filename'], html_diff, f]
149
149
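
The loop at the end of _load_compare_data folds the parsed diff into per-file rows plus running added/deleted counters. A compact standalone sketch of that aggregation over plain dicts (summarise_diff is an illustrative name; the field names mirror the parsed entries used above):

def summarise_diff(parsed_files):
    # Accumulate line counters and a per-file summary row, as the loop above does.
    files, added, deleted = [], 0, 0
    for entry in parsed_files:
        stats = entry['stats']
        added += stats['added']
        deleted += stats['deleted']
        files.append((entry['operation'], entry['filename'], stats))
    return files, added, deleted


files, added, deleted = summarise_diff([
    {'filename': 'setup.py', 'operation': 'M', 'stats': {'added': 3, 'deleted': 1}},
    {'filename': 'README.rst', 'operation': 'A', 'stats': {'added': 10, 'deleted': 0}},
])
assert (added, deleted) == (13, 1)
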
150 def _extract_ordering(self, request):
150 def _extract_ordering(self, request):
151 column_index = safe_int(request.GET.get('order[0][column]'))
151 column_index = safe_int(request.GET.get('order[0][column]'))
152 order_dir = request.GET.get('order[0][dir]', 'desc')
152 order_dir = request.GET.get('order[0][dir]', 'desc')
153 order_by = request.GET.get(
153 order_by = request.GET.get(
154 'columns[%s][data][sort]' % column_index, 'name_raw')
154 'columns[%s][data][sort]' % column_index, 'name_raw')
155 return order_by, order_dir
155 return order_by, order_dir
156
156
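
_extract_ordering reads the server-side DataTables ordering parameters. A standalone sketch of the same parsing against a plain dict, with defaults mirroring the controller (extract_ordering and the sample keys are illustrative):

def extract_ordering(params, default_by='name_raw', default_dir='desc'):
    # DataTables sends e.g. order[0][column]=4, order[0][dir]=asc and a
    # per-column sort key under columns[4][data][sort].
    try:
        column_index = int(params.get('order[0][column]', 0))
    except (TypeError, ValueError):
        column_index = 0
    order_dir = params.get('order[0][dir]', default_dir)
    order_by = params.get('columns[%s][data][sort]' % column_index, default_by)
    return order_by, order_dir


sample = {
    'order[0][column]': '4',
    'order[0][dir]': 'asc',
    'columns[4][data][sort]': 'updated_on_raw',
}
assert extract_ordering(sample) == ('updated_on_raw', 'asc')
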
157 @LoginRequired()
157 @LoginRequired()
158 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
158 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
159 'repository.admin')
159 'repository.admin')
160 @HasAcceptedRepoType('git', 'hg')
160 @HasAcceptedRepoType('git', 'hg')
161 def show_all(self, repo_name):
161 def show_all(self, repo_name):
162 # filter types
162 # filter types
163 c.active = 'open'
163 c.active = 'open'
164 c.source = str2bool(request.GET.get('source'))
164 c.source = str2bool(request.GET.get('source'))
165 c.closed = str2bool(request.GET.get('closed'))
165 c.closed = str2bool(request.GET.get('closed'))
166 c.my = str2bool(request.GET.get('my'))
166 c.my = str2bool(request.GET.get('my'))
167 c.awaiting_review = str2bool(request.GET.get('awaiting_review'))
167 c.awaiting_review = str2bool(request.GET.get('awaiting_review'))
168 c.awaiting_my_review = str2bool(request.GET.get('awaiting_my_review'))
168 c.awaiting_my_review = str2bool(request.GET.get('awaiting_my_review'))
169 c.repo_name = repo_name
169 c.repo_name = repo_name
170
170
171 opened_by = None
171 opened_by = None
172 if c.my:
172 if c.my:
173 c.active = 'my'
173 c.active = 'my'
174 opened_by = [c.rhodecode_user.user_id]
174 opened_by = [c.rhodecode_user.user_id]
175
175
176 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
176 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
177 if c.closed:
177 if c.closed:
178 c.active = 'closed'
178 c.active = 'closed'
179 statuses = [PullRequest.STATUS_CLOSED]
179 statuses = [PullRequest.STATUS_CLOSED]
180
180
181 if c.awaiting_review and not c.source:
181 if c.awaiting_review and not c.source:
182 c.active = 'awaiting'
182 c.active = 'awaiting'
183 if c.source and not c.awaiting_review:
183 if c.source and not c.awaiting_review:
184 c.active = 'source'
184 c.active = 'source'
185 if c.awaiting_my_review:
185 if c.awaiting_my_review:
186 c.active = 'awaiting_my'
186 c.active = 'awaiting_my'
187
187
188 data = self._get_pull_requests_list(
188 data = self._get_pull_requests_list(
189 repo_name=repo_name, opened_by=opened_by, statuses=statuses)
189 repo_name=repo_name, opened_by=opened_by, statuses=statuses)
190 if not request.is_xhr:
190 if not request.is_xhr:
191 c.data = json.dumps(data['data'])
191 c.data = json.dumps(data['data'])
192 c.records_total = data['recordsTotal']
192 c.records_total = data['recordsTotal']
193 return render('/pullrequests/pullrequests.html')
193 return render('/pullrequests/pullrequests.html')
194 else:
194 else:
195 return json.dumps(data)
195 return json.dumps(data)
196
196
197 def _get_pull_requests_list(self, repo_name, opened_by, statuses):
197 def _get_pull_requests_list(self, repo_name, opened_by, statuses):
198 # pagination
198 # pagination
199 start = safe_int(request.GET.get('start'), 0)
199 start = safe_int(request.GET.get('start'), 0)
200 length = safe_int(request.GET.get('length'), c.visual.dashboard_items)
200 length = safe_int(request.GET.get('length'), c.visual.dashboard_items)
201 order_by, order_dir = self._extract_ordering(request)
201 order_by, order_dir = self._extract_ordering(request)
202
202
203 if c.awaiting_review:
203 if c.awaiting_review:
204 pull_requests = PullRequestModel().get_awaiting_review(
204 pull_requests = PullRequestModel().get_awaiting_review(
205 repo_name, source=c.source, opened_by=opened_by,
205 repo_name, source=c.source, opened_by=opened_by,
206 statuses=statuses, offset=start, length=length,
206 statuses=statuses, offset=start, length=length,
207 order_by=order_by, order_dir=order_dir)
207 order_by=order_by, order_dir=order_dir)
208 pull_requests_total_count = PullRequestModel(
208 pull_requests_total_count = PullRequestModel(
209 ).count_awaiting_review(
209 ).count_awaiting_review(
210 repo_name, source=c.source, statuses=statuses,
210 repo_name, source=c.source, statuses=statuses,
211 opened_by=opened_by)
211 opened_by=opened_by)
212 elif c.awaiting_my_review:
212 elif c.awaiting_my_review:
213 pull_requests = PullRequestModel().get_awaiting_my_review(
213 pull_requests = PullRequestModel().get_awaiting_my_review(
214 repo_name, source=c.source, opened_by=opened_by,
214 repo_name, source=c.source, opened_by=opened_by,
215 user_id=c.rhodecode_user.user_id, statuses=statuses,
215 user_id=c.rhodecode_user.user_id, statuses=statuses,
216 offset=start, length=length, order_by=order_by,
216 offset=start, length=length, order_by=order_by,
217 order_dir=order_dir)
217 order_dir=order_dir)
218 pull_requests_total_count = PullRequestModel(
218 pull_requests_total_count = PullRequestModel(
219 ).count_awaiting_my_review(
219 ).count_awaiting_my_review(
220 repo_name, source=c.source, user_id=c.rhodecode_user.user_id,
220 repo_name, source=c.source, user_id=c.rhodecode_user.user_id,
221 statuses=statuses, opened_by=opened_by)
221 statuses=statuses, opened_by=opened_by)
222 else:
222 else:
223 pull_requests = PullRequestModel().get_all(
223 pull_requests = PullRequestModel().get_all(
224 repo_name, source=c.source, opened_by=opened_by,
224 repo_name, source=c.source, opened_by=opened_by,
225 statuses=statuses, offset=start, length=length,
225 statuses=statuses, offset=start, length=length,
226 order_by=order_by, order_dir=order_dir)
226 order_by=order_by, order_dir=order_dir)
227 pull_requests_total_count = PullRequestModel().count_all(
227 pull_requests_total_count = PullRequestModel().count_all(
228 repo_name, source=c.source, statuses=statuses,
228 repo_name, source=c.source, statuses=statuses,
229 opened_by=opened_by)
229 opened_by=opened_by)
230
230
231 from rhodecode.lib.utils import PartialRenderer
231 from rhodecode.lib.utils import PartialRenderer
232 _render = PartialRenderer('data_table/_dt_elements.html')
232 _render = PartialRenderer('data_table/_dt_elements.html')
233 data = []
233 data = []
234 for pr in pull_requests:
234 for pr in pull_requests:
235 comments = ChangesetCommentsModel().get_all_comments(
235 comments = ChangesetCommentsModel().get_all_comments(
236 c.rhodecode_db_repo.repo_id, pull_request=pr)
236 c.rhodecode_db_repo.repo_id, pull_request=pr)
237
237
238 data.append({
238 data.append({
239 'name': _render('pullrequest_name',
239 'name': _render('pullrequest_name',
240 pr.pull_request_id, pr.target_repo.repo_name),
240 pr.pull_request_id, pr.target_repo.repo_name),
241 'name_raw': pr.pull_request_id,
241 'name_raw': pr.pull_request_id,
242 'status': _render('pullrequest_status',
242 'status': _render('pullrequest_status',
243 pr.calculated_review_status()),
243 pr.calculated_review_status()),
244 'title': _render(
244 'title': _render(
245 'pullrequest_title', pr.title, pr.description),
245 'pullrequest_title', pr.title, pr.description),
246 'description': h.escape(pr.description),
246 'description': h.escape(pr.description),
247 'updated_on': _render('pullrequest_updated_on',
247 'updated_on': _render('pullrequest_updated_on',
248 h.datetime_to_time(pr.updated_on)),
248 h.datetime_to_time(pr.updated_on)),
249 'updated_on_raw': h.datetime_to_time(pr.updated_on),
249 'updated_on_raw': h.datetime_to_time(pr.updated_on),
250 'created_on': _render('pullrequest_updated_on',
250 'created_on': _render('pullrequest_updated_on',
251 h.datetime_to_time(pr.created_on)),
251 h.datetime_to_time(pr.created_on)),
252 'created_on_raw': h.datetime_to_time(pr.created_on),
252 'created_on_raw': h.datetime_to_time(pr.created_on),
253 'author': _render('pullrequest_author',
253 'author': _render('pullrequest_author',
254 pr.author.full_contact, ),
254 pr.author.full_contact, ),
255 'author_raw': pr.author.full_name,
255 'author_raw': pr.author.full_name,
256 'comments': _render('pullrequest_comments', len(comments)),
256 'comments': _render('pullrequest_comments', len(comments)),
257 'comments_raw': len(comments),
257 'comments_raw': len(comments),
258 'closed': pr.is_closed(),
258 'closed': pr.is_closed(),
259 })
259 })
260 # json used to render the grid
260 # json used to render the grid
261 data = ({
261 data = ({
262 'data': data,
262 'data': data,
263 'recordsTotal': pull_requests_total_count,
263 'recordsTotal': pull_requests_total_count,
264 'recordsFiltered': pull_requests_total_count,
264 'recordsFiltered': pull_requests_total_count,
265 })
265 })
266 return data
266 return data
267
267
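
_get_pull_requests_list always answers with the envelope a DataTables grid expects: the row objects plus total and filtered counts. A minimal sketch of that response shape (grid_response is an illustrative helper; in the controller the offset/length are applied in the database query rather than by slicing in Python):

def grid_response(rows, total_count, start=0, length=100):
    # 'start'/'length' mirror the request paging parameters; the grid only
    # needs one page of rows plus the total/filtered counts to draw the pager.
    page = rows[start:start + length]
    return {
        'data': page,
        'recordsTotal': total_count,
        'recordsFiltered': total_count,
    }


print(grid_response([{'name_raw': i} for i in range(25)], 25, start=10, length=10))
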
268 @LoginRequired()
268 @LoginRequired()
269 @NotAnonymous()
269 @NotAnonymous()
270 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
270 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
271 'repository.admin')
271 'repository.admin')
272 @HasAcceptedRepoType('git', 'hg')
272 @HasAcceptedRepoType('git', 'hg')
273 def index(self):
273 def index(self):
274 source_repo = c.rhodecode_db_repo
274 source_repo = c.rhodecode_db_repo
275
275
276 try:
276 try:
277 source_repo.scm_instance().get_commit()
277 source_repo.scm_instance().get_commit()
278 except EmptyRepositoryError:
278 except EmptyRepositoryError:
279 h.flash(h.literal(_('There are no commits yet')),
279 h.flash(h.literal(_('There are no commits yet')),
280 category='warning')
280 category='warning')
281 redirect(url('summary_home', repo_name=source_repo.repo_name))
281 redirect(url('summary_home', repo_name=source_repo.repo_name))
282
282
283 commit_id = request.GET.get('commit')
283 commit_id = request.GET.get('commit')
284 branch_ref = request.GET.get('branch')
284 branch_ref = request.GET.get('branch')
285 bookmark_ref = request.GET.get('bookmark')
285 bookmark_ref = request.GET.get('bookmark')
286
286
287 try:
287 try:
288 source_repo_data = PullRequestModel().generate_repo_data(
288 source_repo_data = PullRequestModel().generate_repo_data(
289 source_repo, commit_id=commit_id,
289 source_repo, commit_id=commit_id,
290 branch=branch_ref, bookmark=bookmark_ref)
290 branch=branch_ref, bookmark=bookmark_ref)
291 except CommitDoesNotExistError as e:
291 except CommitDoesNotExistError as e:
292 log.exception(e)
292 log.exception(e)
293 h.flash(_('Commit does not exist'), 'error')
293 h.flash(_('Commit does not exist'), 'error')
294 redirect(url('pullrequest_home', repo_name=source_repo.repo_name))
294 redirect(url('pullrequest_home', repo_name=source_repo.repo_name))
295
295
296 default_target_repo = source_repo
296 default_target_repo = source_repo
297 if (source_repo.parent and
297 if (source_repo.parent and
298 not source_repo.parent.scm_instance().is_empty()):
298 not source_repo.parent.scm_instance().is_empty()):
299 # change default if we have a parent repo
299 # change default if we have a parent repo
300 default_target_repo = source_repo.parent
300 default_target_repo = source_repo.parent
301
301
302 target_repo_data = PullRequestModel().generate_repo_data(
302 target_repo_data = PullRequestModel().generate_repo_data(
303 default_target_repo)
303 default_target_repo)
304
304
305 selected_source_ref = source_repo_data['refs']['selected_ref']
305 selected_source_ref = source_repo_data['refs']['selected_ref']
306
306
307 title_source_ref = selected_source_ref.split(':', 2)[1]
307 title_source_ref = selected_source_ref.split(':', 2)[1]
308 c.default_title = PullRequestModel().generate_pullrequest_title(
308 c.default_title = PullRequestModel().generate_pullrequest_title(
309 source=source_repo.repo_name,
309 source=source_repo.repo_name,
310 source_ref=title_source_ref,
310 source_ref=title_source_ref,
311 target=default_target_repo.repo_name
311 target=default_target_repo.repo_name
312 )
312 )
313
313
314 c.default_repo_data = {
314 c.default_repo_data = {
315 'source_repo_name': source_repo.repo_name,
315 'source_repo_name': source_repo.repo_name,
316 'source_refs_json': json.dumps(source_repo_data),
316 'source_refs_json': json.dumps(source_repo_data),
317 'target_repo_name': default_target_repo.repo_name,
317 'target_repo_name': default_target_repo.repo_name,
318 'target_refs_json': json.dumps(target_repo_data),
318 'target_refs_json': json.dumps(target_repo_data),
319 }
319 }
320 c.default_source_ref = selected_source_ref
320 c.default_source_ref = selected_source_ref
321
321
322 return render('/pullrequests/pullrequest.html')
322 return render('/pullrequests/pullrequest.html')
323
323
324 @LoginRequired()
324 @LoginRequired()
325 @NotAnonymous()
325 @NotAnonymous()
326 @XHRRequired()
326 @XHRRequired()
327 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
327 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
328 'repository.admin')
328 'repository.admin')
329 @jsonify
329 @jsonify
330 def get_repo_refs(self, repo_name, target_repo_name):
330 def get_repo_refs(self, repo_name, target_repo_name):
331 repo = Repository.get_by_repo_name(target_repo_name)
331 repo = Repository.get_by_repo_name(target_repo_name)
332 if not repo:
332 if not repo:
333 raise HTTPNotFound
333 raise HTTPNotFound
334 return PullRequestModel().generate_repo_data(repo)
334 return PullRequestModel().generate_repo_data(repo)
335
335
336 @LoginRequired()
336 @LoginRequired()
337 @NotAnonymous()
337 @NotAnonymous()
338 @XHRRequired()
338 @XHRRequired()
339 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
339 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
340 'repository.admin')
340 'repository.admin')
341 @jsonify
341 @jsonify
342 def get_repo_destinations(self, repo_name):
342 def get_repo_destinations(self, repo_name):
343 repo = Repository.get_by_repo_name(repo_name)
343 repo = Repository.get_by_repo_name(repo_name)
344 if not repo:
344 if not repo:
345 raise HTTPNotFound
345 raise HTTPNotFound
346 filter_query = request.GET.get('query')
346 filter_query = request.GET.get('query')
347
347
348 query = Repository.query() \
348 query = Repository.query() \
349 .order_by(func.length(Repository.repo_name)) \
349 .order_by(func.length(Repository.repo_name)) \
350 .filter(or_(
350 .filter(or_(
351 Repository.repo_name == repo.repo_name,
351 Repository.repo_name == repo.repo_name,
352 Repository.fork_id == repo.repo_id))
352 Repository.fork_id == repo.repo_id))
353
353
354 if filter_query:
354 if filter_query:
355 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
355 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
356 query = query.filter(
356 query = query.filter(
357 Repository.repo_name.ilike(ilike_expression))
357 Repository.repo_name.ilike(ilike_expression))
358
358
359 add_parent = False
359 add_parent = False
360 if repo.parent:
360 if repo.parent:
361 if filter_query in repo.parent.repo_name:
361 if filter_query in repo.parent.repo_name:
362 if not repo.parent.scm_instance().is_empty():
362 if not repo.parent.scm_instance().is_empty():
363 add_parent = True
363 add_parent = True
364
364
365 limit = 20 - 1 if add_parent else 20
365 limit = 20 - 1 if add_parent else 20
366 all_repos = query.limit(limit).all()
366 all_repos = query.limit(limit).all()
367 if add_parent:
367 if add_parent:
368 all_repos += [repo.parent]
368 all_repos += [repo.parent]
369
369
370 repos = []
370 repos = []
371 for obj in self.scm_model.get_repos(all_repos):
371 for obj in self.scm_model.get_repos(all_repos):
372 repos.append({
372 repos.append({
373 'id': obj['name'],
373 'id': obj['name'],
374 'text': obj['name'],
374 'text': obj['name'],
375 'type': 'repo',
375 'type': 'repo',
376 'obj': obj['dbrepo']
376 'obj': obj['dbrepo']
377 })
377 })
378
378
379 data = {
379 data = {
380 'more': False,
380 'more': False,
381 'results': [{
381 'results': [{
382 'text': _('Repositories'),
382 'text': _('Repositories'),
383 'children': repos
383 'children': repos
384 }] if repos else []
384 }] if repos else []
385 }
385 }
386 return data
386 return data
387
387
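
get_repo_destinations reserves one slot of its 20-item limit when the parent repository will be appended after the query results, so the final list never exceeds the limit. A small sketch of that reserve-a-slot pattern (limited_with_parent is an illustrative helper):

def limited_with_parent(fetch_page, parent=None, limit=20):
    # Reserve one slot when a parent entry will be appended afterwards.
    page_size = limit - 1 if parent is not None else limit
    items = fetch_page(page_size)
    if parent is not None:
        items = items + [parent]
    return items


assert len(limited_with_parent(lambda n: list(range(n)), parent='parent')) == 20
assert len(limited_with_parent(lambda n: list(range(n)))) == 20
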
388 @LoginRequired()
388 @LoginRequired()
389 @NotAnonymous()
389 @NotAnonymous()
390 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
390 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
391 'repository.admin')
391 'repository.admin')
392 @HasAcceptedRepoType('git', 'hg')
392 @HasAcceptedRepoType('git', 'hg')
393 @auth.CSRFRequired()
393 @auth.CSRFRequired()
394 def create(self, repo_name):
394 def create(self, repo_name):
395 repo = Repository.get_by_repo_name(repo_name)
395 repo = Repository.get_by_repo_name(repo_name)
396 if not repo:
396 if not repo:
397 raise HTTPNotFound
397 raise HTTPNotFound
398
398
399 try:
399 try:
400 _form = PullRequestForm(repo.repo_id)().to_python(request.POST)
400 _form = PullRequestForm(repo.repo_id)().to_python(request.POST)
401 except formencode.Invalid as errors:
401 except formencode.Invalid as errors:
402 if errors.error_dict.get('revisions'):
402 if errors.error_dict.get('revisions'):
403 msg = 'Revisions: %s' % errors.error_dict['revisions']
403 msg = 'Revisions: %s' % errors.error_dict['revisions']
404 elif errors.error_dict.get('pullrequest_title'):
404 elif errors.error_dict.get('pullrequest_title'):
405 msg = _('Pull request requires a title with min. 3 chars')
405 msg = _('Pull request requires a title with min. 3 chars')
406 else:
406 else:
407 msg = _('Error creating pull request: {}').format(errors)
407 msg = _('Error creating pull request: {}').format(errors)
408 log.exception(msg)
408 log.exception(msg)
409 h.flash(msg, 'error')
409 h.flash(msg, 'error')
410
410
411 # would rather just go back to form ...
411 # would rather just go back to form ...
412 return redirect(url('pullrequest_home', repo_name=repo_name))
412 return redirect(url('pullrequest_home', repo_name=repo_name))
413
413
414 source_repo = _form['source_repo']
414 source_repo = _form['source_repo']
415 source_ref = _form['source_ref']
415 source_ref = _form['source_ref']
416 target_repo = _form['target_repo']
416 target_repo = _form['target_repo']
417 target_ref = _form['target_ref']
417 target_ref = _form['target_ref']
418 commit_ids = _form['revisions'][::-1]
418 commit_ids = _form['revisions'][::-1]
419 reviewers = _form['review_members']
419 reviewers = _form['review_members']
420
420
421 # find the ancestor for this pr
421 # find the ancestor for this pr
422 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
422 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
423 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
423 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
424
424
425 source_scm = source_db_repo.scm_instance()
425 source_scm = source_db_repo.scm_instance()
426 target_scm = target_db_repo.scm_instance()
426 target_scm = target_db_repo.scm_instance()
427
427
428 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
428 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
429 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
429 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
430
430
431 ancestor = source_scm.get_common_ancestor(
431 ancestor = source_scm.get_common_ancestor(
432 source_commit.raw_id, target_commit.raw_id, target_scm)
432 source_commit.raw_id, target_commit.raw_id, target_scm)
433
433
434 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
434 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
435 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
435 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
436
436
437 pullrequest_title = _form['pullrequest_title']
437 pullrequest_title = _form['pullrequest_title']
438 title_source_ref = source_ref.split(':', 2)[1]
438 title_source_ref = source_ref.split(':', 2)[1]
439 if not pullrequest_title:
439 if not pullrequest_title:
440 pullrequest_title = PullRequestModel().generate_pullrequest_title(
440 pullrequest_title = PullRequestModel().generate_pullrequest_title(
441 source=source_repo,
441 source=source_repo,
442 source_ref=title_source_ref,
442 source_ref=title_source_ref,
443 target=target_repo
443 target=target_repo
444 )
444 )
445
445
446 description = _form['pullrequest_desc']
446 description = _form['pullrequest_desc']
447 try:
447 try:
448 pull_request = PullRequestModel().create(
448 pull_request = PullRequestModel().create(
449 c.rhodecode_user.user_id, source_repo, source_ref, target_repo,
449 c.rhodecode_user.user_id, source_repo, source_ref, target_repo,
450 target_ref, commit_ids, reviewers, pullrequest_title,
450 target_ref, commit_ids, reviewers, pullrequest_title,
451 description
451 description
452 )
452 )
453 Session().commit()
453 Session().commit()
454 h.flash(_('Successfully opened new pull request'),
454 h.flash(_('Successfully opened new pull request'),
455 category='success')
455 category='success')
456 except Exception as e:
456 except Exception as e:
457 raise
457 msg = _('Error occurred during sending pull request')
458 msg = _('Error occurred during sending pull request')
458 log.exception(msg)
459 log.exception(msg)
459 h.flash(msg, category='error')
460 h.flash(msg, category='error')
460 return redirect(url('pullrequest_home', repo_name=repo_name))
461 return redirect(url('pullrequest_home', repo_name=repo_name))
461
462
462 return redirect(url('pullrequest_show', repo_name=target_repo,
463 return redirect(url('pullrequest_show', repo_name=target_repo,
463 pull_request_id=pull_request.pull_request_id))
464 pull_request_id=pull_request.pull_request_id))
464
465
465 @LoginRequired()
466 @LoginRequired()
466 @NotAnonymous()
467 @NotAnonymous()
467 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
468 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
468 'repository.admin')
469 'repository.admin')
469 @auth.CSRFRequired()
470 @auth.CSRFRequired()
470 @jsonify
471 @jsonify
471 def update(self, repo_name, pull_request_id):
472 def update(self, repo_name, pull_request_id):
472 pull_request_id = safe_int(pull_request_id)
473 pull_request_id = safe_int(pull_request_id)
473 pull_request = PullRequest.get_or_404(pull_request_id)
474 pull_request = PullRequest.get_or_404(pull_request_id)
474 # only owner or admin can update it
475 # only owner or admin can update it
475 allowed_to_update = PullRequestModel().check_user_update(
476 allowed_to_update = PullRequestModel().check_user_update(
476 pull_request, c.rhodecode_user)
477 pull_request, c.rhodecode_user)
477 if allowed_to_update:
478 if allowed_to_update:
478 if 'reviewers_ids' in request.POST:
479 if 'reviewers_ids' in request.POST:
479 self._update_reviewers(pull_request_id)
480 self._update_reviewers(pull_request_id)
480 elif str2bool(request.POST.get('update_commits', 'false')):
481 elif str2bool(request.POST.get('update_commits', 'false')):
481 self._update_commits(pull_request)
482 self._update_commits(pull_request)
482 elif str2bool(request.POST.get('close_pull_request', 'false')):
483 elif str2bool(request.POST.get('close_pull_request', 'false')):
483 self._reject_close(pull_request)
484 self._reject_close(pull_request)
484 elif str2bool(request.POST.get('edit_pull_request', 'false')):
485 elif str2bool(request.POST.get('edit_pull_request', 'false')):
485 self._edit_pull_request(pull_request)
486 self._edit_pull_request(pull_request)
486 else:
487 else:
487 raise HTTPBadRequest()
488 raise HTTPBadRequest()
488 return True
489 return True
489 raise HTTPForbidden()
490 raise HTTPForbidden()
490
491
491 def _edit_pull_request(self, pull_request):
492 def _edit_pull_request(self, pull_request):
492 try:
493 try:
493 PullRequestModel().edit(
494 PullRequestModel().edit(
494 pull_request, request.POST.get('title'),
495 pull_request, request.POST.get('title'),
495 request.POST.get('description'))
496 request.POST.get('description'))
496 except ValueError:
497 except ValueError:
497 msg = _(u'Cannot update closed pull requests.')
498 msg = _(u'Cannot update closed pull requests.')
498 h.flash(msg, category='error')
499 h.flash(msg, category='error')
499 return
500 return
500 else:
501 else:
501 Session().commit()
502 Session().commit()
502
503
503 msg = _(u'Pull request title & description updated.')
504 msg = _(u'Pull request title & description updated.')
504 h.flash(msg, category='success')
505 h.flash(msg, category='success')
505 return
506 return
506
507
507 def _update_commits(self, pull_request):
508 def _update_commits(self, pull_request):
508 try:
509 try:
509 if PullRequestModel().has_valid_update_type(pull_request):
510 if PullRequestModel().has_valid_update_type(pull_request):
510 updated_version, changes = PullRequestModel().update_commits(
511 updated_version, changes = PullRequestModel().update_commits(
511 pull_request)
512 pull_request)
512 if updated_version:
513 if updated_version:
513 msg = _(
514 msg = _(
514 u'Pull request updated to "{source_commit_id}" with '
515 u'Pull request updated to "{source_commit_id}" with '
515 u'{count_added} added, {count_removed} removed '
516 u'{count_added} added, {count_removed} removed '
516 u'commits.'
517 u'commits.'
517 ).format(
518 ).format(
518 source_commit_id=pull_request.source_ref_parts.commit_id,
519 source_commit_id=pull_request.source_ref_parts.commit_id,
519 count_added=len(changes.added),
520 count_added=len(changes.added),
520 count_removed=len(changes.removed))
521 count_removed=len(changes.removed))
521 h.flash(msg, category='success')
522 h.flash(msg, category='success')
522 else:
523 else:
523 h.flash(_("Nothing changed in pull request."),
524 h.flash(_("Nothing changed in pull request."),
524 category='warning')
525 category='warning')
525 else:
526 else:
526 msg = _(
527 msg = _(
527 u"Skipping update of pull request due to reference "
528 u"Skipping update of pull request due to reference "
528 u"type: {reference_type}"
529 u"type: {reference_type}"
529 ).format(reference_type=pull_request.source_ref_parts.type)
530 ).format(reference_type=pull_request.source_ref_parts.type)
530 h.flash(msg, category='warning')
531 h.flash(msg, category='warning')
531 except CommitDoesNotExistError:
532 except CommitDoesNotExistError:
532 h.flash(
533 h.flash(
533 _(u'Update failed due to missing commits.'), category='error')
534 _(u'Update failed due to missing commits.'), category='error')
534
535
535 @auth.CSRFRequired()
536 @auth.CSRFRequired()
536 @LoginRequired()
537 @LoginRequired()
537 @NotAnonymous()
538 @NotAnonymous()
538 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
539 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
539 'repository.admin')
540 'repository.admin')
540 def merge(self, repo_name, pull_request_id):
541 def merge(self, repo_name, pull_request_id):
541 """
542 """
542 POST /{repo_name}/pull-request/{pull_request_id}
543 POST /{repo_name}/pull-request/{pull_request_id}
543
544
544 Merge will perform a server-side merge of the specified
545 Merge will perform a server-side merge of the specified
545 pull request, if the pull request is approved and mergeable.
546 pull request, if the pull request is approved and mergeable.
546 After successful merging, the pull request is automatically
547 After successful merging, the pull request is automatically
547 closed, with a relevant comment.
548 closed, with a relevant comment.
548 """
549 """
549 pull_request_id = safe_int(pull_request_id)
550 pull_request_id = safe_int(pull_request_id)
550 pull_request = PullRequest.get_or_404(pull_request_id)
551 pull_request = PullRequest.get_or_404(pull_request_id)
551 user = c.rhodecode_user
552 user = c.rhodecode_user
552
553
553 if self._meets_merge_pre_conditions(pull_request, user):
554 if self._meets_merge_pre_conditions(pull_request, user):
554 log.debug("Pre-conditions checked, trying to merge.")
555 log.debug("Pre-conditions checked, trying to merge.")
555 extras = vcs_operation_context(
556 extras = vcs_operation_context(
556 request.environ, repo_name=pull_request.target_repo.repo_name,
557 request.environ, repo_name=pull_request.target_repo.repo_name,
557 username=user.username, action='push',
558 username=user.username, action='push',
558 scm=pull_request.target_repo.repo_type)
559 scm=pull_request.target_repo.repo_type)
559 self._merge_pull_request(pull_request, user, extras)
560 self._merge_pull_request(pull_request, user, extras)
560
561
561 return redirect(url(
562 return redirect(url(
562 'pullrequest_show',
563 'pullrequest_show',
563 repo_name=pull_request.target_repo.repo_name,
564 repo_name=pull_request.target_repo.repo_name,
564 pull_request_id=pull_request.pull_request_id))
565 pull_request_id=pull_request.pull_request_id))
565
566
566 def _meets_merge_pre_conditions(self, pull_request, user):
567 def _meets_merge_pre_conditions(self, pull_request, user):
567 if not PullRequestModel().check_user_merge(pull_request, user):
568 if not PullRequestModel().check_user_merge(pull_request, user):
568 raise HTTPForbidden()
569 raise HTTPForbidden()
569
570
570 merge_status, msg = PullRequestModel().merge_status(pull_request)
571 merge_status, msg = PullRequestModel().merge_status(pull_request)
571 if not merge_status:
572 if not merge_status:
572 log.debug("Cannot merge, not mergeable.")
573 log.debug("Cannot merge, not mergeable.")
573 h.flash(msg, category='error')
574 h.flash(msg, category='error')
574 return False
575 return False
575
576
576 if (pull_request.calculated_review_status()
577 if (pull_request.calculated_review_status()
577 is not ChangesetStatus.STATUS_APPROVED):
578 is not ChangesetStatus.STATUS_APPROVED):
578 log.debug("Cannot merge, approval is pending.")
579 log.debug("Cannot merge, approval is pending.")
579 msg = _('Pull request reviewer approval is pending.')
580 msg = _('Pull request reviewer approval is pending.')
580 h.flash(msg, category='error')
581 h.flash(msg, category='error')
581 return False
582 return False
582 return True
583 return True
583
584
584 def _merge_pull_request(self, pull_request, user, extras):
585 def _merge_pull_request(self, pull_request, user, extras):
585 merge_resp = PullRequestModel().merge(
586 merge_resp = PullRequestModel().merge(
586 pull_request, user, extras=extras)
587 pull_request, user, extras=extras)
587
588
588 if merge_resp.executed:
589 if merge_resp.executed:
589 log.debug("The merge was successful, closing the pull request.")
590 log.debug("The merge was successful, closing the pull request.")
590 PullRequestModel().close_pull_request(
591 PullRequestModel().close_pull_request(
591 pull_request.pull_request_id, user)
592 pull_request.pull_request_id, user)
592 Session().commit()
593 Session().commit()
593 msg = _('Pull request was successfully merged and closed.')
594 msg = _('Pull request was successfully merged and closed.')
594 h.flash(msg, category='success')
595 h.flash(msg, category='success')
595 else:
596 else:
596 log.debug(
597 log.debug(
597 "The merge was not successful. Merge response: %s",
598 "The merge was not successful. Merge response: %s",
598 merge_resp)
599 merge_resp)
599 msg = PullRequestModel().merge_status_message(
600 msg = PullRequestModel().merge_status_message(
600 merge_resp.failure_reason)
601 merge_resp.failure_reason)
601 h.flash(msg, category='error')
602 h.flash(msg, category='error')
602
603
603 def _update_reviewers(self, pull_request_id):
604 def _update_reviewers(self, pull_request_id):
604 reviewers_ids = map(int, filter(
605 reviewers_ids = map(int, filter(
605 lambda v: v not in [None, ''],
606 lambda v: v not in [None, ''],
606 request.POST.get('reviewers_ids', '').split(',')))
607 request.POST.get('reviewers_ids', '').split(',')))
607 PullRequestModel().update_reviewers(pull_request_id, reviewers_ids)
608 PullRequestModel().update_reviewers(pull_request_id, reviewers_ids)
608 Session().commit()
609 Session().commit()
609
610
610 def _reject_close(self, pull_request):
611 def _reject_close(self, pull_request):
611 if pull_request.is_closed():
612 if pull_request.is_closed():
612 raise HTTPForbidden()
613 raise HTTPForbidden()
613
614
614 PullRequestModel().close_pull_request_with_comment(
615 PullRequestModel().close_pull_request_with_comment(
615 pull_request, c.rhodecode_user, c.rhodecode_db_repo)
616 pull_request, c.rhodecode_user, c.rhodecode_db_repo)
616 Session().commit()
617 Session().commit()
617
618
618 @LoginRequired()
619 @LoginRequired()
619 @NotAnonymous()
620 @NotAnonymous()
620 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
621 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
621 'repository.admin')
622 'repository.admin')
622 @auth.CSRFRequired()
623 @auth.CSRFRequired()
623 @jsonify
624 @jsonify
624 def delete(self, repo_name, pull_request_id):
625 def delete(self, repo_name, pull_request_id):
625 pull_request_id = safe_int(pull_request_id)
626 pull_request_id = safe_int(pull_request_id)
626 pull_request = PullRequest.get_or_404(pull_request_id)
627 pull_request = PullRequest.get_or_404(pull_request_id)
627 # only owner can delete it !
628 # only owner can delete it !
628 if pull_request.author.user_id == c.rhodecode_user.user_id:
629 if pull_request.author.user_id == c.rhodecode_user.user_id:
629 PullRequestModel().delete(pull_request)
630 PullRequestModel().delete(pull_request)
630 Session().commit()
631 Session().commit()
631 h.flash(_('Successfully deleted pull request'),
632 h.flash(_('Successfully deleted pull request'),
632 category='success')
633 category='success')
633 return redirect(url('my_account_pullrequests'))
634 return redirect(url('my_account_pullrequests'))
634 raise HTTPForbidden()
635 raise HTTPForbidden()
635
636
636 @LoginRequired()
637 @LoginRequired()
637 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
638 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
638 'repository.admin')
639 'repository.admin')
639 def show(self, repo_name, pull_request_id):
640 def show(self, repo_name, pull_request_id):
640 pull_request_id = safe_int(pull_request_id)
641 pull_request_id = safe_int(pull_request_id)
641 c.pull_request = PullRequest.get_or_404(pull_request_id)
642 c.pull_request = PullRequest.get_or_404(pull_request_id)
642
643
643 # the repo_name in the URL must be the repository this pull request
644 # the repo_name in the URL must be the repository this pull request
644 # was opened against, i.e. it must match target_repo
645 # was opened against, i.e. it must match target_repo
645 if repo_name != c.pull_request.target_repo.repo_name:
646 if repo_name != c.pull_request.target_repo.repo_name:
646 raise HTTPNotFound
647 raise HTTPNotFound
647
648
648 c.allowed_to_change_status = PullRequestModel(). \
649 c.allowed_to_change_status = PullRequestModel(). \
649 check_user_change_status(c.pull_request, c.rhodecode_user)
650 check_user_change_status(c.pull_request, c.rhodecode_user)
650 c.allowed_to_update = PullRequestModel().check_user_update(
651 c.allowed_to_update = PullRequestModel().check_user_update(
651 c.pull_request, c.rhodecode_user) and not c.pull_request.is_closed()
652 c.pull_request, c.rhodecode_user) and not c.pull_request.is_closed()
652 c.allowed_to_merge = PullRequestModel().check_user_merge(
653 c.allowed_to_merge = PullRequestModel().check_user_merge(
653 c.pull_request, c.rhodecode_user) and not c.pull_request.is_closed()
654 c.pull_request, c.rhodecode_user) and not c.pull_request.is_closed()
654
655
655 cc_model = ChangesetCommentsModel()
656 cc_model = ChangesetCommentsModel()
656
657
657 c.pull_request_reviewers = c.pull_request.reviewers_statuses()
658 c.pull_request_reviewers = c.pull_request.reviewers_statuses()
658
659
659 c.pull_request_review_status = c.pull_request.calculated_review_status()
660 c.pull_request_review_status = c.pull_request.calculated_review_status()
660 c.pr_merge_status, c.pr_merge_msg = PullRequestModel().merge_status(
661 c.pr_merge_status, c.pr_merge_msg = PullRequestModel().merge_status(
661 c.pull_request)
662 c.pull_request)
662 c.approval_msg = None
663 c.approval_msg = None
663 if c.pull_request_review_status != ChangesetStatus.STATUS_APPROVED:
664 if c.pull_request_review_status != ChangesetStatus.STATUS_APPROVED:
664 c.approval_msg = _('Reviewer approval is pending.')
665 c.approval_msg = _('Reviewer approval is pending.')
665 c.pr_merge_status = False
666 c.pr_merge_status = False
666 # load compare data into template context
667 # load compare data into template context
667 enable_comments = not c.pull_request.is_closed()
668 enable_comments = not c.pull_request.is_closed()
668 self._load_compare_data(c.pull_request, enable_comments=enable_comments)
669 self._load_compare_data(c.pull_request, enable_comments=enable_comments)
669
670
670 # this is a hack to properly display links, when creating PR, the
671 # this is a hack to properly display links, when creating PR, the
671 # compare view and others use different notation, and
672 # compare view and others use different notation, and
672 # compare_commits.html renders links based on the target_repo.
673 # compare_commits.html renders links based on the target_repo.
673 # We need to swap that here to generate it properly on the html side
674 # We need to swap that here to generate it properly on the html side
674 c.target_repo = c.source_repo
675 c.target_repo = c.source_repo
675
676
676 # inline comments
677 # inline comments
677 c.inline_cnt = 0
678 c.inline_cnt = 0
678 c.inline_comments = cc_model.get_inline_comments(
679 c.inline_comments = cc_model.get_inline_comments(
679 c.rhodecode_db_repo.repo_id,
680 c.rhodecode_db_repo.repo_id,
680 pull_request=pull_request_id).items()
681 pull_request=pull_request_id).items()
681 # count inline comments
682 # count inline comments
682 for __, lines in c.inline_comments:
683 for __, lines in c.inline_comments:
683 for comments in lines.values():
684 for comments in lines.values():
684 c.inline_cnt += len(comments)
685 c.inline_cnt += len(comments)
685
686
686 # outdated comments
687 # outdated comments
687 c.outdated_cnt = 0
688 c.outdated_cnt = 0
688 if ChangesetCommentsModel.use_outdated_comments(c.pull_request):
689 if ChangesetCommentsModel.use_outdated_comments(c.pull_request):
689 c.outdated_comments = cc_model.get_outdated_comments(
690 c.outdated_comments = cc_model.get_outdated_comments(
690 c.rhodecode_db_repo.repo_id,
691 c.rhodecode_db_repo.repo_id,
691 pull_request=c.pull_request)
692 pull_request=c.pull_request)
692 # Count outdated comments and check for deleted files
693 # Count outdated comments and check for deleted files
693 for file_name, lines in c.outdated_comments.iteritems():
694 for file_name, lines in c.outdated_comments.iteritems():
694 for comments in lines.values():
695 for comments in lines.values():
695 c.outdated_cnt += len(comments)
696 c.outdated_cnt += len(comments)
696 if file_name not in c.included_files:
697 if file_name not in c.included_files:
697 c.deleted_files.append(file_name)
698 c.deleted_files.append(file_name)
698 else:
699 else:
699 c.outdated_comments = {}
700 c.outdated_comments = {}
700
701
701 # comments
702 # comments
702 c.comments = cc_model.get_comments(c.rhodecode_db_repo.repo_id,
703 c.comments = cc_model.get_comments(c.rhodecode_db_repo.repo_id,
703 pull_request=pull_request_id)
704 pull_request=pull_request_id)
704
705
705 if c.allowed_to_update:
706 if c.allowed_to_update:
706 force_close = ('forced_closed', _('Close Pull Request'))
707 force_close = ('forced_closed', _('Close Pull Request'))
707 statuses = ChangesetStatus.STATUSES + [force_close]
708 statuses = ChangesetStatus.STATUSES + [force_close]
708 else:
709 else:
709 statuses = ChangesetStatus.STATUSES
710 statuses = ChangesetStatus.STATUSES
710 c.commit_statuses = statuses
711 c.commit_statuses = statuses
711
712
712 c.ancestor = None # TODO: add ancestor here
713 c.ancestor = None # TODO: add ancestor here
713
714
714 return render('/pullrequests/pullrequest_show.html')
715 return render('/pullrequests/pullrequest_show.html')
715
716
716 @LoginRequired()
717 @LoginRequired()
717 @NotAnonymous()
718 @NotAnonymous()
718 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
719 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
719 'repository.admin')
720 'repository.admin')
720 @auth.CSRFRequired()
721 @auth.CSRFRequired()
721 @jsonify
722 @jsonify
722 def comment(self, repo_name, pull_request_id):
723 def comment(self, repo_name, pull_request_id):
723 pull_request_id = safe_int(pull_request_id)
724 pull_request_id = safe_int(pull_request_id)
724 pull_request = PullRequest.get_or_404(pull_request_id)
725 pull_request = PullRequest.get_or_404(pull_request_id)
725 if pull_request.is_closed():
726 if pull_request.is_closed():
726 raise HTTPForbidden()
727 raise HTTPForbidden()
727
728
728 # TODO: johbo: Re-think this bit, "approved_closed" does not exist
729 # TODO: johbo: Re-think this bit, "approved_closed" does not exist
729 # as a changeset status, still we want to send it in one value.
730 # as a changeset status, still we want to send it in one value.
730 status = request.POST.get('changeset_status', None)
731 status = request.POST.get('changeset_status', None)
731 text = request.POST.get('text')
732 text = request.POST.get('text')
732 if status and '_closed' in status:
733 if status and '_closed' in status:
733 close_pr = True
734 close_pr = True
734 status = status.replace('_closed', '')
735 status = status.replace('_closed', '')
735 else:
736 else:
736 close_pr = False
737 close_pr = False
737
738
738 forced = (status == 'forced')
739 forced = (status == 'forced')
739 if forced:
740 if forced:
740 status = 'rejected'
741 status = 'rejected'
741
742
742 allowed_to_change_status = PullRequestModel().check_user_change_status(
743 allowed_to_change_status = PullRequestModel().check_user_change_status(
743 pull_request, c.rhodecode_user)
744 pull_request, c.rhodecode_user)
744
745
745 if status and allowed_to_change_status:
746 if status and allowed_to_change_status:
746 message = (_('Status change %(transition_icon)s %(status)s')
747 message = (_('Status change %(transition_icon)s %(status)s')
747 % {'transition_icon': '>',
748 % {'transition_icon': '>',
748 'status': ChangesetStatus.get_status_lbl(status)})
749 'status': ChangesetStatus.get_status_lbl(status)})
749 if close_pr:
750 if close_pr:
750 message = _('Closing with') + ' ' + message
751 message = _('Closing with') + ' ' + message
751 text = text or message
752 text = text or message
752 comm = ChangesetCommentsModel().create(
753 comm = ChangesetCommentsModel().create(
753 text=text,
754 text=text,
754 repo=c.rhodecode_db_repo.repo_id,
755 repo=c.rhodecode_db_repo.repo_id,
755 user=c.rhodecode_user.user_id,
756 user=c.rhodecode_user.user_id,
756 pull_request=pull_request_id,
757 pull_request=pull_request_id,
757 f_path=request.POST.get('f_path'),
758 f_path=request.POST.get('f_path'),
758 line_no=request.POST.get('line'),
759 line_no=request.POST.get('line'),
759 status_change=(ChangesetStatus.get_status_lbl(status)
760 status_change=(ChangesetStatus.get_status_lbl(status)
760 if status and allowed_to_change_status else None),
761 if status and allowed_to_change_status else None),
761 closing_pr=close_pr
762 closing_pr=close_pr
762 )
763 )
763
764
764 if allowed_to_change_status:
765 if allowed_to_change_status:
765 old_calculated_status = pull_request.calculated_review_status()
766 old_calculated_status = pull_request.calculated_review_status()
766 # get status if set !
767 # get status if set !
767 if status:
768 if status:
768 ChangesetStatusModel().set_status(
769 ChangesetStatusModel().set_status(
769 c.rhodecode_db_repo.repo_id,
770 c.rhodecode_db_repo.repo_id,
770 status,
771 status,
771 c.rhodecode_user.user_id,
772 c.rhodecode_user.user_id,
772 comm,
773 comm,
773 pull_request=pull_request_id
774 pull_request=pull_request_id
774 )
775 )
775
776
776 Session().flush()
777 Session().flush()
777 # we now calculate the status of the pull request, and based on that
778 # we now calculate the status of the pull request, and based on that
778 # calculation we set the commit statuses
779 # calculation we set the commit statuses
779 calculated_status = pull_request.calculated_review_status()
780 calculated_status = pull_request.calculated_review_status()
780 if old_calculated_status != calculated_status:
781 if old_calculated_status != calculated_status:
781 PullRequestModel()._trigger_pull_request_hook(
782 PullRequestModel()._trigger_pull_request_hook(
782 pull_request, c.rhodecode_user, 'review_status_change')
783 pull_request, c.rhodecode_user, 'review_status_change')
783
784
784 calculated_status_lbl = ChangesetStatus.get_status_lbl(
785 calculated_status_lbl = ChangesetStatus.get_status_lbl(
785 calculated_status)
786 calculated_status)
786
787
787 if close_pr:
788 if close_pr:
788 status_completed = (
789 status_completed = (
789 calculated_status in [ChangesetStatus.STATUS_APPROVED,
790 calculated_status in [ChangesetStatus.STATUS_APPROVED,
790 ChangesetStatus.STATUS_REJECTED])
791 ChangesetStatus.STATUS_REJECTED])
791 if forced or status_completed:
792 if forced or status_completed:
792 PullRequestModel().close_pull_request(
793 PullRequestModel().close_pull_request(
793 pull_request_id, c.rhodecode_user)
794 pull_request_id, c.rhodecode_user)
794 else:
795 else:
795 h.flash(_('Closing pull request on statuses other than '
796 h.flash(_('Closing pull request on statuses other than '
796 'rejected or approved is forbidden. '
797 'rejected or approved is forbidden. '
797 'Calculated status from all reviewers '
798 'Calculated status from all reviewers '
798 'is currently: %s') % calculated_status_lbl,
799 'is currently: %s') % calculated_status_lbl,
799 category='warning')
800 category='warning')
800
801
801 Session().commit()
802 Session().commit()
802
803
803 if not request.is_xhr:
804 if not request.is_xhr:
804 return redirect(h.url('pullrequest_show', repo_name=repo_name,
805 return redirect(h.url('pullrequest_show', repo_name=repo_name,
805 pull_request_id=pull_request_id))
806 pull_request_id=pull_request_id))
806
807
807 data = {
808 data = {
808 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
809 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
809 }
810 }
810 if comm:
811 if comm:
811 c.co = comm
812 c.co = comm
812 data.update(comm.get_dict())
813 data.update(comm.get_dict())
813 data.update({'rendered_text':
814 data.update({'rendered_text':
814 render('changeset/changeset_comment_block.html')})
815 render('changeset/changeset_comment_block.html')})
815
816
816 return data
817 return data
817
818
818 @LoginRequired()
819 @LoginRequired()
819 @NotAnonymous()
820 @NotAnonymous()
820 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
821 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
821 'repository.admin')
822 'repository.admin')
822 @auth.CSRFRequired()
823 @auth.CSRFRequired()
823 @jsonify
824 @jsonify
824 def delete_comment(self, repo_name, comment_id):
825 def delete_comment(self, repo_name, comment_id):
825 return self._delete_comment(comment_id)
826 return self._delete_comment(comment_id)
826
827
827 def _delete_comment(self, comment_id):
828 def _delete_comment(self, comment_id):
828 comment_id = safe_int(comment_id)
829 comment_id = safe_int(comment_id)
829 co = ChangesetComment.get_or_404(comment_id)
830 co = ChangesetComment.get_or_404(comment_id)
830 if co.pull_request.is_closed():
831 if co.pull_request.is_closed():
831 # don't allow deleting comments on closed pull request
832 # don't allow deleting comments on closed pull request
832 raise HTTPForbidden()
833 raise HTTPForbidden()
833
834
834 is_owner = co.author.user_id == c.rhodecode_user.user_id
835 is_owner = co.author.user_id == c.rhodecode_user.user_id
835 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
836 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
836 if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner:
837 if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner:
837 old_calculated_status = co.pull_request.calculated_review_status()
838 old_calculated_status = co.pull_request.calculated_review_status()
838 ChangesetCommentsModel().delete(comment=co)
839 ChangesetCommentsModel().delete(comment=co)
839 Session().commit()
840 Session().commit()
840 calculated_status = co.pull_request.calculated_review_status()
841 calculated_status = co.pull_request.calculated_review_status()
841 if old_calculated_status != calculated_status:
842 if old_calculated_status != calculated_status:
842 PullRequestModel()._trigger_pull_request_hook(
843 PullRequestModel()._trigger_pull_request_hook(
843 co.pull_request, c.rhodecode_user, 'review_status_change')
844 co.pull_request, c.rhodecode_user, 'review_status_change')
844 return True
845 return True
845 else:
846 else:
846 raise HTTPForbidden()
847 raise HTTPForbidden()
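
The update() action above multiplexes several operations through POST flags rather than separate routes: the first recognised flag in the elif chain decides which private helper runs. A minimal standalone sketch of that dispatch pattern (plain Python, not RhodeCode code; `params` stands in for request.POST and str2bool-style truthiness is only approximated)::

    def dispatch_update(params, handlers):
        # handlers is an ordered list of (flag, callable) pairs; the first
        # flag present with a truthy value wins, mirroring the elif chain
        # in update() above
        for flag, handler in handlers:
            value = params.get(flag, 'false')
            if str(value).lower() in ('true', '1', 'yes', 'on'):
                return handler()
        raise ValueError('no known update operation requested')

    # usage sketch with illustrative handler names:
    # dispatch_update(request_params, [
    #     ('update_commits', update_commits),
    #     ('close_pull_request', reject_close),
    #     ('edit_pull_request', edit_pull_request),
    # ])
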
@@ -1,59 +1,57 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from pyramid.threadlocal import get_current_registry
19 from pyramid.threadlocal import get_current_registry
20
20
21
21
22 class RhodecodeEvent(object):
23 """
24 Base event class for all Rhodecode events
25 """
26
27
28 def trigger(event):
22 def trigger(event):
29 """
23 """
30 Helper function to send an event. This wraps the pyramid logic to send an
24 Helper function to send an event. This wraps the pyramid logic to send an
31 event.
25 event.
32 """
26 """
33 # For the first step we are using pyramid's thread locals here. If the
27 # For the first step we are using pyramid's thread locals here. If the
34 # event mechanism works out as a good solution we should think about
28 # event mechanism works out as a good solution we should think about
35 # passing the registry as an argument to get rid of it.
29 # passing the registry as an argument to get rid of it.
36 registry = get_current_registry()
30 registry = get_current_registry()
37 registry.notify(event)
31 registry.notify(event)
38
32
39
33
34 from rhodecode.events.base import RhodecodeEvent
35
40 from rhodecode.events.user import (
36 from rhodecode.events.user import (
41 UserPreCreate,
37 UserPreCreate,
42 UserPreUpdate,
38 UserPreUpdate,
43 UserRegistered
39 UserRegistered
44 )
40 )
45
41
46 from rhodecode.events.repo import (
42 from rhodecode.events.repo import (
43 RepoEvent,
47 RepoPreCreateEvent, RepoCreatedEvent,
44 RepoPreCreateEvent, RepoCreatedEvent,
48 RepoPreDeleteEvent, RepoDeletedEvent,
45 RepoPreDeleteEvent, RepoDeletedEvent,
49 RepoPrePushEvent, RepoPushEvent,
46 RepoPrePushEvent, RepoPushEvent,
50 RepoPrePullEvent, RepoPullEvent,
47 RepoPrePullEvent, RepoPullEvent,
51 )
48 )
52
49
53 from rhodecode.events.pullrequest import (
50 from rhodecode.events.pullrequest import (
51 PullRequestEvent,
54 PullRequestCreateEvent,
52 PullRequestCreateEvent,
55 PullRequestUpdateEvent,
53 PullRequestUpdateEvent,
56 PullRequestReviewEvent,
54 PullRequestReviewEvent,
57 PullRequestMergeEvent,
55 PullRequestMergeEvent,
58 PullRequestCloseEvent,
56 PullRequestCloseEvent,
59 ) No newline at end of file
57 )
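
After this refactoring the events package exposes trigger() at module level while the RhodecodeEvent base class lives in rhodecode.events.base. A hedged sketch of how a subscriber could be wired up with Pyramid and fed through trigger(); the log_repo_created callback and the includeme hook are illustrative names, not part of this changeset::

    from rhodecode.events import RepoCreatedEvent, trigger


    def log_repo_created(event):
        # the subscriber receives the exact object handed to registry.notify();
        # as_dict() is the marshmallow-backed serializer this commit adds
        print('repo created: %s' % event.as_dict())


    def includeme(config):
        # illustrative Pyramid wiring; RhodeCode registers its own subscribers
        config.add_subscriber(log_repo_created, RepoCreatedEvent)

    # elsewhere, sending the event is a single call:
    # trigger(RepoCreatedEvent(repo))
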
@@ -1,72 +1,97 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from marshmallow import Schema, fields
20
19 from rhodecode.events.repo import RepoEvent
21 from rhodecode.events.repo import RepoEvent
20
22
21
23
24 def get_pull_request_url(pull_request):
25 from rhodecode.model.pull_request import PullRequestModel
26 return PullRequestModel().get_url(pull_request)
27
28
29 class PullRequestSchema(Schema):
30 """
31 Marshmallow schema for a pull request
32 """
33 pull_request_id = fields.Integer()
34 url = fields.Function(get_pull_request_url)
35 title = fields.Str()
36
37
38 class PullRequestEventSchema(RepoEvent.MarshmallowSchema):
39 """
40 Marshmallow schema for a pull request event
41 """
42 pullrequest = fields.Nested(PullRequestSchema)
43
44
22 class PullRequestEvent(RepoEvent):
45 class PullRequestEvent(RepoEvent):
23 """
46 """
24 Base class for events acting on a repository.
47 Base class for pull request events.
25
48
26 :param repo: a :class:`Repository` instance
49 :param pullrequest: a :class:`PullRequest` instance
27 """
50 """
51 MarshmallowSchema = PullRequestEventSchema
52
28 def __init__(self, pullrequest):
53 def __init__(self, pullrequest):
29 super(PullRequestEvent, self).__init__(pullrequest.target_repo)
54 super(PullRequestEvent, self).__init__(pullrequest.target_repo)
30 self.pullrequest = pullrequest
55 self.pullrequest = pullrequest
31
56
32
57
33 class PullRequestCreateEvent(PullRequestEvent):
58 class PullRequestCreateEvent(PullRequestEvent):
34 """
59 """
35 An instance of this class is emitted as an :term:`event` after a pull
60 An instance of this class is emitted as an :term:`event` after a pull
36 request is created.
61 request is created.
37 """
62 """
38 name = 'pullrequest-create'
63 name = 'pullrequest-create'
39
64
40
65
41 class PullRequestCloseEvent(PullRequestEvent):
66 class PullRequestCloseEvent(PullRequestEvent):
42 """
67 """
43 An instance of this class is emitted as an :term:`event` after a pull
68 An instance of this class is emitted as an :term:`event` after a pull
44 request is closed.
69 request is closed.
45 """
70 """
46 name = 'pullrequest-close'
71 name = 'pullrequest-close'
47
72
48
73
49 class PullRequestUpdateEvent(PullRequestEvent):
74 class PullRequestUpdateEvent(PullRequestEvent):
50 """
75 """
51 An instance of this class is emitted as an :term:`event` after a pull
76 An instance of this class is emitted as an :term:`event` after a pull
52 request is updated.
77 request is updated.
53 """
78 """
54 name = 'pullrequest-update'
79 name = 'pullrequest-update'
55
80
56
81
57 class PullRequestMergeEvent(PullRequestEvent):
82 class PullRequestMergeEvent(PullRequestEvent):
58 """
83 """
59 An instance of this class is emitted as an :term:`event` after a pull
84 An instance of this class is emitted as an :term:`event` after a pull
60 request is merged.
85 request is merged.
61 """
86 """
62 name = 'pullrequest-merge'
87 name = 'pullrequest-merge'
63
88
64
89
65 class PullRequestReviewEvent(PullRequestEvent):
90 class PullRequestReviewEvent(PullRequestEvent):
66 """
91 """
67 An instance of this class is emitted as an :term:`event` after a pull
92 An instance of this class is emitted as an :term:`event` after a pull
68 request is reviewed.
93 request is reviewed.
69 """
94 """
70 name = 'pullrequest-review'
95 name = 'pullrequest-review'
71
96
72
97
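
With PullRequestEventSchema nesting PullRequestSchema on top of the repository event schema, serializing a pull request event is expected to flatten into plain data. A minimal sketch, assuming a PullRequest instance is already at hand and the surrounding controller wiring is omitted; the broadcast_pull_request_created helper is illustrative only::

    from rhodecode.events import PullRequestCreateEvent, trigger


    def broadcast_pull_request_created(pull_request):
        event = PullRequestCreateEvent(pull_request)
        # the dumped dict carries utc_timestamp, acting_user and acting_ip
        # from the base schema, plus the nested repository and pullrequest
        # payloads declared by the schemas above
        payload = event.as_dict()
        trigger(event)
        return payload
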
@@ -1,113 +1,149 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from marshmallow import Schema, fields
20
19 from rhodecode.model.db import Repository, Session, User
21 from rhodecode.model.db import Repository, Session, User
20 from rhodecode.events import RhodecodeEvent
22 from rhodecode.events.base import RhodecodeEvent
23
24
25 def get_repo_url(repo):
26 from rhodecode.model.repo import RepoModel
27 return RepoModel().get_url(repo)
28
29
30 class RepositorySchema(Schema):
31 """
32 Marshmallow schema for a repository
33 """
34 repo_id = fields.Integer()
35 repo_name = fields.Str()
36 url = fields.Function(get_repo_url)
37
38
39 class RepoEventSchema(RhodecodeEvent.MarshmallowSchema):
40 """
41 Marshmallow schema for a repository event
42 """
43 repository = fields.Nested(RepositorySchema)
21
44
22
45
23 class RepoEvent(RhodecodeEvent):
46 class RepoEvent(RhodecodeEvent):
24 """
47 """
25 Base class for events acting on a repository.
48 Base class for events acting on a repository.
26
49
27 :param repo: a :class:`Repository` instance
50 :param repo: a :class:`Repository` instance
28 """
51 """
52 MarshmallowSchema = RepoEventSchema
53
29 def __init__(self, repo):
54 def __init__(self, repo):
55 super(RepoEvent, self).__init__()
30 self.repo = repo
56 self.repo = repo
31
57
32
58
33 class RepoPreCreateEvent(RepoEvent):
59 class RepoPreCreateEvent(RepoEvent):
34 """
60 """
35 An instance of this class is emitted as an :term:`event` before a repo is
61 An instance of this class is emitted as an :term:`event` before a repo is
36 created.
62 created.
37 """
63 """
38 name = 'repo-pre-create'
64 name = 'repo-pre-create'
39
65
40
66
41 class RepoCreatedEvent(RepoEvent):
67 class RepoCreatedEvent(RepoEvent):
42 """
68 """
43 An instance of this class is emitted as an :term:`event` whenever a repo is
69 An instance of this class is emitted as an :term:`event` whenever a repo is
44 created.
70 created.
45 """
71 """
46 name = 'repo-created'
72 name = 'repo-created'
47
73
48
74
49 class RepoPreDeleteEvent(RepoEvent):
75 class RepoPreDeleteEvent(RepoEvent):
50 """
76 """
51 An instance of this class is emitted as an :term:`event` before a repo is
77 An instance of this class is emitted as an :term:`event` before a repo is
52 deleted.
78 deleted.
53 """
79 """
54 name = 'repo-pre-delete'
80 name = 'repo-pre-delete'
55
81
56
82
57 class RepoDeletedEvent(RepoEvent):
83 class RepoDeletedEvent(RepoEvent):
58 """
84 """
59 An instance of this class is emitted as an :term:`event` whenever a repo is
85 An instance of this class is emitted as an :term:`event` whenever a repo is
60 deleted.
86 deleted.
61 """
87 """
62 name = 'repo-deleted'
88 name = 'repo-deleted'
63
89
64
90
65 class RepoVCSEvent(RepoEvent):
91 class RepoVCSEvent(RepoEvent):
66 """
92 """
67 Base class for events triggered by the VCS
93 Base class for events triggered by the VCS
68 """
94 """
69 def __init__(self, repo_name, extras):
95 def __init__(self, repo_name, extras):
70 self.repo = Repository.get_by_repo_name(repo_name)
96 self.repo = Repository.get_by_repo_name(repo_name)
71 if not self.repo:
97 if not self.repo:
72 raise Exception('repo by this name %s does not exist' % repo_name)
98 raise Exception('repo by this name %s does not exist' % repo_name)
73 self.extras = extras
99 self.extras = extras
74 super(RepoVCSEvent, self).__init__(self.repo)
100 super(RepoVCSEvent, self).__init__(self.repo)
75
101
102 @property
103 def acting_user(self):
104 if self.extras.get('username'):
105 return User.get_by_username(self.extras['username'])
106
107 @property
108 def acting_ip(self):
109 if self.extras.get('ip'):
110 return self.extras['ip']
111
76
112
77 class RepoPrePullEvent(RepoVCSEvent):
113 class RepoPrePullEvent(RepoVCSEvent):
78 """
114 """
79 An instance of this class is emitted as an :term:`event` before commits
115 An instance of this class is emitted as an :term:`event` before commits
80 are pulled from a repo.
116 are pulled from a repo.
81 """
117 """
82 name = 'repo-pre-pull'
118 name = 'repo-pre-pull'
83
119
84
120
85 class RepoPullEvent(RepoVCSEvent):
121 class RepoPullEvent(RepoVCSEvent):
86 """
122 """
87 An instance of this class is emitted as an :term:`event` after commits
123 An instance of this class is emitted as an :term:`event` after commits
88 are pulled from a repo.
124 are pulled from a repo.
89 """
125 """
90 name = 'repo-pull'
126 name = 'repo-pull'
91
127
92
128
93 class RepoPrePushEvent(RepoVCSEvent):
129 class RepoPrePushEvent(RepoVCSEvent):
94 """
130 """
95 An instance of this class is emitted as an :term:`event` before commits
131 An instance of this class is emitted as an :term:`event` before commits
96 are pushed to a repo.
132 are pushed to a repo.
97 """
133 """
98 name = 'repo-pre-push'
134 name = 'repo-pre-push'
99
135
100
136
101 class RepoPushEvent(RepoVCSEvent):
137 class RepoPushEvent(RepoVCSEvent):
102 """
138 """
103 An instance of this class is emitted as an :term:`event` after commits
139 An instance of this class is emitted as an :term:`event` after commits
104 are pushed to a repo.
140 are pushed to a repo.
105
141
106 :param extras: (optional) dict of data from proxied VCS actions
142 :param extras: (optional) dict of data from proxied VCS actions
107 """
143 """
108 name = 'repo-push'
144 name = 'repo-push'
109
145
110 def __init__(self, repo_name, pushed_commit_ids, extras):
146 def __init__(self, repo_name, pushed_commit_ids, extras):
111 super(RepoPushEvent, self).__init__(repo_name, extras)
147 super(RepoPushEvent, self).__init__(repo_name, extras)
112 self.pushed_commit_ids = pushed_commit_ids
148 self.pushed_commit_ids = pushed_commit_ids
113
149
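
The url field above relies on marshmallow's fields.Function, which calls the given function with the object being dumped. A standalone sketch of that pattern in the 2.x dump(...).data style used throughout this changeset; FakeRepo and repo_url() are stand-ins, not RhodeCode code::

    from marshmallow import Schema, fields


    class FakeRepo(object):
        def __init__(self, repo_id, repo_name):
            self.repo_id = repo_id
            self.repo_name = repo_name


    def repo_url(repo):
        # stand-in for RepoModel().get_url(repo)
        return '/%s' % repo.repo_name


    class FakeRepoSchema(Schema):
        repo_id = fields.Integer()
        repo_name = fields.Str()
        url = fields.Function(repo_url)


    result = FakeRepoSchema().dump(FakeRepo(1, 'my-repo'))
    # result.data == {'repo_id': 1, 'repo_name': 'my-repo', 'url': '/my-repo'}
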
@@ -1,54 +1,55 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from zope.interface import implementer
19 from zope.interface import implementer
20 from rhodecode.events import RhodecodeEvent
20
21 from rhodecode.events.base import RhodecodeEvent
21 from rhodecode.events.interfaces import (
22 from rhodecode.events.interfaces import (
22 IUserRegistered, IUserPreCreate, IUserPreUpdate)
23 IUserRegistered, IUserPreCreate, IUserPreUpdate)
23
24
24
25
25 @implementer(IUserRegistered)
26 @implementer(IUserRegistered)
26 class UserRegistered(RhodecodeEvent):
27 class UserRegistered(RhodecodeEvent):
27 """
28 """
28 An instance of this class is emitted as an :term:`event` whenever a user
29 An instance of this class is emitted as an :term:`event` whenever a user
29 account is registered.
30 account is registered.
30 """
31 """
31 def __init__(self, user, session):
32 def __init__(self, user, session):
32 self.user = user
33 self.user = user
33 self.session = session
34 self.session = session
34
35
35
36
36 @implementer(IUserPreCreate)
37 @implementer(IUserPreCreate)
37 class UserPreCreate(RhodecodeEvent):
38 class UserPreCreate(RhodecodeEvent):
38 """
39 """
39 An instance of this class is emitted as an :term:`event` before a new user
40 An instance of this class is emitted as an :term:`event` before a new user
40 object is created.
41 object is created.
41 """
42 """
42 def __init__(self, user_data):
43 def __init__(self, user_data):
43 self.user_data = user_data
44 self.user_data = user_data
44
45
45
46
46 @implementer(IUserPreUpdate)
47 @implementer(IUserPreUpdate)
47 class UserPreUpdate(RhodecodeEvent):
48 class UserPreUpdate(RhodecodeEvent):
48 """
49 """
49 An instance of this class is emitted as an :term:`event` before a user
50 An instance of this class is emitted as an :term:`event` before a user
50 object is updated.
51 object is updated.
51 """
52 """
52 def __init__(self, user, user_data):
53 def __init__(self, user, user_data):
53 self.user = user
54 self.user = user
54 self.user_data = user_data
55 self.user_data = user_data
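
The user events keep the plain constructors above and do not declare their own MarshmallowSchema, so serialization falls back to the base event schema. A hedged sketch of what that looks like for UserPreCreate; the announce_user_pre_create helper and the user_data payload are illustrative, not part of this changeset::

    from rhodecode.events import UserPreCreate, trigger


    def announce_user_pre_create(user_data):
        event = UserPreCreate(user_data)
        # only the base fields (utc_timestamp, acting_user, acting_ip) are
        # serialized, because UserPreCreate does not override MarshmallowSchema
        payload = event.as_dict()
        trigger(event)
        return payload
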
@@ -1,3477 +1,3477 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Database Models for RhodeCode Enterprise
22 Database Models for RhodeCode Enterprise
23 """
23 """
24
24
25 import os
25 import os
26 import sys
26 import sys
27 import time
27 import time
28 import hashlib
28 import hashlib
29 import logging
29 import logging
30 import datetime
30 import datetime
31 import warnings
31 import warnings
32 import ipaddress
32 import ipaddress
33 import functools
33 import functools
34 import traceback
34 import traceback
35 import collections
35 import collections
36
36
37
37
38 from sqlalchemy import *
38 from sqlalchemy import *
39 from sqlalchemy.exc import IntegrityError
39 from sqlalchemy.exc import IntegrityError
40 from sqlalchemy.ext.declarative import declared_attr
40 from sqlalchemy.ext.declarative import declared_attr
41 from sqlalchemy.ext.hybrid import hybrid_property
41 from sqlalchemy.ext.hybrid import hybrid_property
42 from sqlalchemy.orm import (
42 from sqlalchemy.orm import (
43 relationship, joinedload, class_mapper, validates, aliased)
43 relationship, joinedload, class_mapper, validates, aliased)
44 from sqlalchemy.sql.expression import true
44 from sqlalchemy.sql.expression import true
45 from beaker.cache import cache_region, region_invalidate
45 from beaker.cache import cache_region, region_invalidate
46 from webob.exc import HTTPNotFound
46 from webob.exc import HTTPNotFound
47 from zope.cachedescriptors.property import Lazy as LazyProperty
47 from zope.cachedescriptors.property import Lazy as LazyProperty
48
48
49 from pylons import url
49 from pylons import url
50 from pylons.i18n.translation import lazy_ugettext as _
50 from pylons.i18n.translation import lazy_ugettext as _
51
51
52 from rhodecode.lib.vcs import get_backend
52 from rhodecode.lib.vcs import get_backend
53 from rhodecode.lib.vcs.utils.helpers import get_scm
53 from rhodecode.lib.vcs.utils.helpers import get_scm
54 from rhodecode.lib.vcs.exceptions import VCSError
54 from rhodecode.lib.vcs.exceptions import VCSError
55 from rhodecode.lib.vcs.backends.base import (
55 from rhodecode.lib.vcs.backends.base import (
56 EmptyCommit, Reference, MergeFailureReason)
56 EmptyCommit, Reference, MergeFailureReason)
57 from rhodecode.lib.utils2 import (
57 from rhodecode.lib.utils2 import (
58 str2bool, safe_str, get_commit_safe, safe_unicode, remove_prefix, md5_safe,
58 str2bool, safe_str, get_commit_safe, safe_unicode, remove_prefix, md5_safe,
59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict)
59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict)
60 from rhodecode.lib.ext_json import json
60 from rhodecode.lib.ext_json import json
61 from rhodecode.lib.caching_query import FromCache
61 from rhodecode.lib.caching_query import FromCache
62 from rhodecode.lib.encrypt import AESCipher
62 from rhodecode.lib.encrypt import AESCipher
63
63
64 from rhodecode.model.meta import Base, Session
64 from rhodecode.model.meta import Base, Session
65
65
66 URL_SEP = '/'
66 URL_SEP = '/'
67 log = logging.getLogger(__name__)
67 log = logging.getLogger(__name__)
68
68
69 # =============================================================================
69 # =============================================================================
70 # BASE CLASSES
70 # BASE CLASSES
71 # =============================================================================
71 # =============================================================================
72
72
73 # this is propagated from the .ini file setting rhodecode.encrypted_values.secret,
73 # this is propagated from the .ini file setting rhodecode.encrypted_values.secret,
74 # or beaker.session.secret if the first one is not set,
74 # or beaker.session.secret if the first one is not set,
75 # and is initialized in environment.py
75 # and is initialized in environment.py
76 ENCRYPTION_KEY = None
76 ENCRYPTION_KEY = None
77
77
78 # used to sort permissions by type; '#' is used because it is not allowed in
78 # used to sort permissions by type; '#' is used because it is not allowed in
79 # usernames and sorts very early in the string.printable table.
79 # usernames and sorts very early in the string.printable table.
80 PERMISSION_TYPE_SORT = {
80 PERMISSION_TYPE_SORT = {
81 'admin': '####',
81 'admin': '####',
82 'write': '###',
82 'write': '###',
83 'read': '##',
83 'read': '##',
84 'none': '#',
84 'none': '#',
85 }
85 }
86
86
87
87
88 def display_sort(obj):
88 def display_sort(obj):
89 """
89 """
90 Sort function used to sort permissions in the .permissions() function of
90 Sort function used to sort permissions in the .permissions() function of
91 Repository, RepoGroup and UserGroup. It also puts the default user in front
91 Repository, RepoGroup and UserGroup. It also puts the default user in front
92 of all other resources.
92 of all other resources.
93 """
93 """
94
94
95 if obj.username == User.DEFAULT_USER:
95 if obj.username == User.DEFAULT_USER:
96 return '#####'
96 return '#####'
97 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
97 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
98 return prefix + obj.username
98 return prefix + obj.username
99
99
100
100
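# Illustrative sketch (not part of the module): how display_sort is meant to be
# passed to sorted(). PermStub is a hypothetical stand-in for the permission
# rows returned by the .permissions() methods mentioned in the docstring above.
#
#     from collections import namedtuple
#     PermStub = namedtuple('PermStub', 'username permission')
#     perms = [PermStub('bob', 'repository.read'),
#              PermStub('alice', 'repository.admin'),
#              PermStub('default', 'repository.none')]
#     ordered = sorted(perms, key=display_sort)
#     # 'default' sorts first, then admins, writers and readers, each group
#     # ordered alphabetically by username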
101 def _hash_key(k):
101 def _hash_key(k):
102 return md5_safe(k)
102 return md5_safe(k)
103
103
104
104
105 class EncryptedTextValue(TypeDecorator):
105 class EncryptedTextValue(TypeDecorator):
106 """
106 """
107 Special column for encrypted long text data, use like::
107 Special column for encrypted long text data, use like::
108
108
109 value = Column("encrypted_value", EncryptedTextValue(), nullable=False)
109 value = Column("encrypted_value", EncryptedTextValue(), nullable=False)
110
110
111 This column is intelligent: if the stored value is in unencrypted form it is
111 This column is intelligent: if the stored value is in unencrypted form it is
112 returned as-is, but on save the value is always encrypted.
112 returned as-is, but on save the value is always encrypted.
113 """
113 """
114 impl = Text
114 impl = Text
115
115
116 def process_bind_param(self, value, dialect):
116 def process_bind_param(self, value, dialect):
117 if not value:
117 if not value:
118 return value
118 return value
119 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
119 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
120 # protect against double encryption if someone manually tries to
120 # protect against double encryption if someone manually tries to
121 # re-encrypt an already encrypted value
121 # re-encrypt an already encrypted value
122 raise ValueError('value needs to be in unencrypted format, i.e. '
122 raise ValueError('value needs to be in unencrypted format, i.e. '
123 'not starting with enc$aes')
123 'not starting with enc$aes')
124 return 'enc$aes_hmac$%s' % AESCipher(
124 return 'enc$aes_hmac$%s' % AESCipher(
125 ENCRYPTION_KEY, hmac=True).encrypt(value)
125 ENCRYPTION_KEY, hmac=True).encrypt(value)
126
126
127 def process_result_value(self, value, dialect):
127 def process_result_value(self, value, dialect):
128 import rhodecode
128 import rhodecode
129
129
130 if not value:
130 if not value:
131 return value
131 return value
132
132
133 parts = value.split('$', 3)
133 parts = value.split('$', 3)
134 if not len(parts) == 3:
134 if not len(parts) == 3:
135 # probably not encrypted values
135 # probably not encrypted values
136 return value
136 return value
137 else:
137 else:
138 if parts[0] != 'enc':
138 if parts[0] != 'enc':
139 # parts look ok, but the value lacks our 'enc' header
139 # parts look ok, but the value lacks our 'enc' header
140 return value
140 return value
141 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
141 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
142 'rhodecode.encrypted_values.strict') or True)
142 'rhodecode.encrypted_values.strict') or True)
143 # at that stage we know it's our encryption
143 # at that stage we know it's our encryption
144 if parts[1] == 'aes':
144 if parts[1] == 'aes':
145 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
145 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
146 elif parts[1] == 'aes_hmac':
146 elif parts[1] == 'aes_hmac':
147 decrypted_data = AESCipher(
147 decrypted_data = AESCipher(
148 ENCRYPTION_KEY, hmac=True,
148 ENCRYPTION_KEY, hmac=True,
149 strict_verification=enc_strict_mode).decrypt(parts[2])
149 strict_verification=enc_strict_mode).decrypt(parts[2])
150 else:
150 else:
151 raise ValueError(
151 raise ValueError(
152 'Encryption type part is wrong, must be `aes` '
152 'Encryption type part is wrong, must be `aes` '
153 'or `aes_hmac`, got `%s` instead' % (parts[1]))
153 'or `aes_hmac`, got `%s` instead' % (parts[1]))
154 return decrypted_data
154 return decrypted_data
155
155
156
156
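# Hedged usage sketch for EncryptedTextValue, assuming ENCRYPTION_KEY has been
# initialized from the .ini settings as described above. The model and column
# names below are hypothetical.
#
#     class PluginSettings(Base, BaseModel):
#         __tablename__ = 'plugin_settings'
#         plugin_settings_id = Column(Integer(), primary_key=True)
#         secret = Column("secret", EncryptedTextValue(), nullable=True)
#
#     obj = PluginSettings()
#     obj.secret = u'plain text'      # encrypted on INSERT to 'enc$aes_hmac$...'
#     Session().add(obj)
#     Session().commit()
#     Session().expire(obj)
#     obj.secret                      # decrypted transparently on reload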
157 class BaseModel(object):
157 class BaseModel(object):
158 """
158 """
159 Base Model for all classes
159 Base Model for all classes
160 """
160 """
161
161
162 @classmethod
162 @classmethod
163 def _get_keys(cls):
163 def _get_keys(cls):
164 """return column names for this model """
164 """return column names for this model """
165 return class_mapper(cls).c.keys()
165 return class_mapper(cls).c.keys()
166
166
167 def get_dict(self):
167 def get_dict(self):
168 """
168 """
169 return dict with keys and values corresponding
169 return dict with keys and values corresponding
170 to this model data """
170 to this model data """
171
171
172 d = {}
172 d = {}
173 for k in self._get_keys():
173 for k in self._get_keys():
174 d[k] = getattr(self, k)
174 d[k] = getattr(self, k)
175
175
176 # also use __json__() if present to get additional fields
176 # also use __json__() if present to get additional fields
177 _json_attr = getattr(self, '__json__', None)
177 _json_attr = getattr(self, '__json__', None)
178 if _json_attr:
178 if _json_attr:
179 # update with attributes from __json__
179 # update with attributes from __json__
180 if callable(_json_attr):
180 if callable(_json_attr):
181 _json_attr = _json_attr()
181 _json_attr = _json_attr()
182 for k, val in _json_attr.iteritems():
182 for k, val in _json_attr.iteritems():
183 d[k] = val
183 d[k] = val
184 return d
184 return d
185
185
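# Sketch of the optional __json__() hook that get_dict() merges in above; the
# Widget model and its column are hypothetical.
#
#     class Widget(Base, BaseModel):
#         __tablename__ = 'widgets'
#         widget_id = Column(Integer(), primary_key=True)
#
#         def __json__(self):
#             # extra computed fields merged into the get_dict() result
#             return {'display_name': 'widget-%s' % self.widget_id}
#
#     Widget.get(1).get_dict()
#     # -> {'widget_id': 1, 'display_name': 'widget-1'}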
186 def get_appstruct(self):
186 def get_appstruct(self):
187 """return list with keys and values tuples corresponding
187 """return list with keys and values tuples corresponding
188 to this model data """
188 to this model data """
189
189
190 l = []
190 l = []
191 for k in self._get_keys():
191 for k in self._get_keys():
192 l.append((k, getattr(self, k),))
192 l.append((k, getattr(self, k),))
193 return l
193 return l
194
194
195 def populate_obj(self, populate_dict):
195 def populate_obj(self, populate_dict):
196 """populate model with data from given populate_dict"""
196 """populate model with data from given populate_dict"""
197
197
198 for k in self._get_keys():
198 for k in self._get_keys():
199 if k in populate_dict:
199 if k in populate_dict:
200 setattr(self, k, populate_dict[k])
200 setattr(self, k, populate_dict[k])
201
201
202 @classmethod
202 @classmethod
203 def query(cls):
203 def query(cls):
204 return Session().query(cls)
204 return Session().query(cls)
205
205
206 @classmethod
206 @classmethod
207 def get(cls, id_):
207 def get(cls, id_):
208 if id_:
208 if id_:
209 return cls.query().get(id_)
209 return cls.query().get(id_)
210
210
211 @classmethod
211 @classmethod
212 def get_or_404(cls, id_):
212 def get_or_404(cls, id_):
213 try:
213 try:
214 id_ = int(id_)
214 id_ = int(id_)
215 except (TypeError, ValueError):
215 except (TypeError, ValueError):
216 raise HTTPNotFound
216 raise HTTPNotFound
217
217
218 res = cls.query().get(id_)
218 res = cls.query().get(id_)
219 if not res:
219 if not res:
220 raise HTTPNotFound
220 raise HTTPNotFound
221 return res
221 return res
222
222
223 @classmethod
223 @classmethod
224 def getAll(cls):
224 def getAll(cls):
225 # deprecated and left for backward compatibility
225 # deprecated and left for backward compatibility
226 return cls.get_all()
226 return cls.get_all()
227
227
228 @classmethod
228 @classmethod
229 def get_all(cls):
229 def get_all(cls):
230 return cls.query().all()
230 return cls.query().all()
231
231
232 @classmethod
232 @classmethod
233 def delete(cls, id_):
233 def delete(cls, id_):
234 obj = cls.query().get(id_)
234 obj = cls.query().get(id_)
235 Session().delete(obj)
235 Session().delete(obj)
236
236
237 @classmethod
237 @classmethod
238 def identity_cache(cls, session, attr_name, value):
238 def identity_cache(cls, session, attr_name, value):
239 exist_in_session = []
239 exist_in_session = []
240 for (item_cls, pkey), instance in session.identity_map.items():
240 for (item_cls, pkey), instance in session.identity_map.items():
241 if cls == item_cls and getattr(instance, attr_name) == value:
241 if cls == item_cls and getattr(instance, attr_name) == value:
242 exist_in_session.append(instance)
242 exist_in_session.append(instance)
243 if exist_in_session:
243 if exist_in_session:
244 if len(exist_in_session) == 1:
244 if len(exist_in_session) == 1:
245 return exist_in_session[0]
245 return exist_in_session[0]
246 log.exception(
246 log.exception(
247 'multiple objects with attr %s and '
247 'multiple objects with attr %s and '
248 'value %s found in the session: %r',
248 'value %s found in the session: %r',
249 attr_name, value, exist_in_session)
249 attr_name, value, exist_in_session)
250
250
251 def __repr__(self):
251 def __repr__(self):
252 if hasattr(self, '__unicode__'):
252 if hasattr(self, '__unicode__'):
253 # python repr needs to return str
253 # python repr needs to return str
254 try:
254 try:
255 return safe_str(self.__unicode__())
255 return safe_str(self.__unicode__())
256 except UnicodeDecodeError:
256 except UnicodeDecodeError:
257 pass
257 pass
258 return '<DB:%s>' % (self.__class__.__name__)
258 return '<DB:%s>' % (self.__class__.__name__)
259
259
260
260
261 class RhodeCodeSetting(Base, BaseModel):
261 class RhodeCodeSetting(Base, BaseModel):
262 __tablename__ = 'rhodecode_settings'
262 __tablename__ = 'rhodecode_settings'
263 __table_args__ = (
263 __table_args__ = (
264 UniqueConstraint('app_settings_name'),
264 UniqueConstraint('app_settings_name'),
265 {'extend_existing': True, 'mysql_engine': 'InnoDB',
265 {'extend_existing': True, 'mysql_engine': 'InnoDB',
266 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
266 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
267 )
267 )
268
268
269 SETTINGS_TYPES = {
269 SETTINGS_TYPES = {
270 'str': safe_str,
270 'str': safe_str,
271 'int': safe_int,
271 'int': safe_int,
272 'unicode': safe_unicode,
272 'unicode': safe_unicode,
273 'bool': str2bool,
273 'bool': str2bool,
274 'list': functools.partial(aslist, sep=',')
274 'list': functools.partial(aslist, sep=',')
275 }
275 }
276 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
276 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
277 GLOBAL_CONF_KEY = 'app_settings'
277 GLOBAL_CONF_KEY = 'app_settings'
278
278
279 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
279 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
280 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
280 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
281 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
281 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
282 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
282 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
283
283
284 def __init__(self, key='', val='', type='unicode'):
284 def __init__(self, key='', val='', type='unicode'):
285 self.app_settings_name = key
285 self.app_settings_name = key
286 self.app_settings_type = type
286 self.app_settings_type = type
287 self.app_settings_value = val
287 self.app_settings_value = val
288
288
289 @validates('_app_settings_value')
289 @validates('_app_settings_value')
290 def validate_settings_value(self, key, val):
290 def validate_settings_value(self, key, val):
291 assert type(val) == unicode
291 assert type(val) == unicode
292 return val
292 return val
293
293
294 @hybrid_property
294 @hybrid_property
295 def app_settings_value(self):
295 def app_settings_value(self):
296 v = self._app_settings_value
296 v = self._app_settings_value
297 _type = self.app_settings_type
297 _type = self.app_settings_type
298 if _type:
298 if _type:
299 _type = self.app_settings_type.split('.')[0]
299 _type = self.app_settings_type.split('.')[0]
300 # decode the encrypted value
300 # decode the encrypted value
301 if 'encrypted' in self.app_settings_type:
301 if 'encrypted' in self.app_settings_type:
302 cipher = EncryptedTextValue()
302 cipher = EncryptedTextValue()
303 v = safe_unicode(cipher.process_result_value(v, None))
303 v = safe_unicode(cipher.process_result_value(v, None))
304
304
305 converter = self.SETTINGS_TYPES.get(_type) or \
305 converter = self.SETTINGS_TYPES.get(_type) or \
306 self.SETTINGS_TYPES['unicode']
306 self.SETTINGS_TYPES['unicode']
307 return converter(v)
307 return converter(v)
308
308
309 @app_settings_value.setter
309 @app_settings_value.setter
310 def app_settings_value(self, val):
310 def app_settings_value(self, val):
311 """
311 """
312 Setter that will always make sure we use unicode in app_settings_value
312 Setter that will always make sure we use unicode in app_settings_value
313
313
314 :param val:
314 :param val:
315 """
315 """
316 val = safe_unicode(val)
316 val = safe_unicode(val)
317 # encode the encrypted value
317 # encode the encrypted value
318 if 'encrypted' in self.app_settings_type:
318 if 'encrypted' in self.app_settings_type:
319 cipher = EncryptedTextValue()
319 cipher = EncryptedTextValue()
320 val = safe_unicode(cipher.process_bind_param(val, None))
320 val = safe_unicode(cipher.process_bind_param(val, None))
321 self._app_settings_value = val
321 self._app_settings_value = val
322
322
323 @hybrid_property
323 @hybrid_property
324 def app_settings_type(self):
324 def app_settings_type(self):
325 return self._app_settings_type
325 return self._app_settings_type
326
326
327 @app_settings_type.setter
327 @app_settings_type.setter
328 def app_settings_type(self, val):
328 def app_settings_type(self, val):
329 if val.split('.')[0] not in self.SETTINGS_TYPES:
329 if val.split('.')[0] not in self.SETTINGS_TYPES:
330 raise Exception('type must be one of %s got %s'
330 raise Exception('type must be one of %s got %s'
331 % (self.SETTINGS_TYPES.keys(), val))
331 % (self.SETTINGS_TYPES.keys(), val))
332 self._app_settings_type = val
332 self._app_settings_type = val
333
333
334 def __unicode__(self):
334 def __unicode__(self):
335 return u"<%s('%s:%s[%s]')>" % (
335 return u"<%s('%s:%s[%s]')>" % (
336 self.__class__.__name__,
336 self.__class__.__name__,
337 self.app_settings_name, self.app_settings_value,
337 self.app_settings_name, self.app_settings_value,
338 self.app_settings_type
338 self.app_settings_type
339 )
339 )
340
340
341
341
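# Hedged example of how the typed settings above behave; the setting names and
# values are illustrative only.
#
#     setting = RhodeCodeSetting('smtp_password', 'secret', 'unicode.encrypted')
#     Session().add(setting)
#     Session().commit()
#     # the raw column now holds 'enc$aes_hmac$...', but reading it back through
#     # the hybrid property returns the decrypted unicode value
#     setting.app_settings_value       # -> u'secret'
#
#     flag = RhodeCodeSetting('show_public_icon', 'True', 'bool')
#     flag.app_settings_value          # -> True, converted via str2bool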
342 class RhodeCodeUi(Base, BaseModel):
342 class RhodeCodeUi(Base, BaseModel):
343 __tablename__ = 'rhodecode_ui'
343 __tablename__ = 'rhodecode_ui'
344 __table_args__ = (
344 __table_args__ = (
345 UniqueConstraint('ui_key'),
345 UniqueConstraint('ui_key'),
346 {'extend_existing': True, 'mysql_engine': 'InnoDB',
346 {'extend_existing': True, 'mysql_engine': 'InnoDB',
347 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
347 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
348 )
348 )
349
349
350 HOOK_REPO_SIZE = 'changegroup.repo_size'
350 HOOK_REPO_SIZE = 'changegroup.repo_size'
351 # HG
351 # HG
352 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
352 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
353 HOOK_PULL = 'outgoing.pull_logger'
353 HOOK_PULL = 'outgoing.pull_logger'
354 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
354 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
355 HOOK_PUSH = 'changegroup.push_logger'
355 HOOK_PUSH = 'changegroup.push_logger'
356
356
357 # TODO: johbo: Unify way how hooks are configured for git and hg,
357 # TODO: johbo: Unify way how hooks are configured for git and hg,
358 # git part is currently hardcoded.
358 # git part is currently hardcoded.
359
359
360 # SVN PATTERNS
360 # SVN PATTERNS
361 SVN_BRANCH_ID = 'vcs_svn_branch'
361 SVN_BRANCH_ID = 'vcs_svn_branch'
362 SVN_TAG_ID = 'vcs_svn_tag'
362 SVN_TAG_ID = 'vcs_svn_tag'
363
363
364 ui_id = Column(
364 ui_id = Column(
365 "ui_id", Integer(), nullable=False, unique=True, default=None,
365 "ui_id", Integer(), nullable=False, unique=True, default=None,
366 primary_key=True)
366 primary_key=True)
367 ui_section = Column(
367 ui_section = Column(
368 "ui_section", String(255), nullable=True, unique=None, default=None)
368 "ui_section", String(255), nullable=True, unique=None, default=None)
369 ui_key = Column(
369 ui_key = Column(
370 "ui_key", String(255), nullable=True, unique=None, default=None)
370 "ui_key", String(255), nullable=True, unique=None, default=None)
371 ui_value = Column(
371 ui_value = Column(
372 "ui_value", String(255), nullable=True, unique=None, default=None)
372 "ui_value", String(255), nullable=True, unique=None, default=None)
373 ui_active = Column(
373 ui_active = Column(
374 "ui_active", Boolean(), nullable=True, unique=None, default=True)
374 "ui_active", Boolean(), nullable=True, unique=None, default=True)
375
375
376 def __repr__(self):
376 def __repr__(self):
377 return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.ui_section,
377 return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.ui_section,
378 self.ui_key, self.ui_value)
378 self.ui_key, self.ui_value)
379
379
380
380
381 class RepoRhodeCodeSetting(Base, BaseModel):
381 class RepoRhodeCodeSetting(Base, BaseModel):
382 __tablename__ = 'repo_rhodecode_settings'
382 __tablename__ = 'repo_rhodecode_settings'
383 __table_args__ = (
383 __table_args__ = (
384 UniqueConstraint(
384 UniqueConstraint(
385 'app_settings_name', 'repository_id',
385 'app_settings_name', 'repository_id',
386 name='uq_repo_rhodecode_setting_name_repo_id'),
386 name='uq_repo_rhodecode_setting_name_repo_id'),
387 {'extend_existing': True, 'mysql_engine': 'InnoDB',
387 {'extend_existing': True, 'mysql_engine': 'InnoDB',
388 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
388 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
389 )
389 )
390
390
391 repository_id = Column(
391 repository_id = Column(
392 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
392 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
393 nullable=False)
393 nullable=False)
394 app_settings_id = Column(
394 app_settings_id = Column(
395 "app_settings_id", Integer(), nullable=False, unique=True,
395 "app_settings_id", Integer(), nullable=False, unique=True,
396 default=None, primary_key=True)
396 default=None, primary_key=True)
397 app_settings_name = Column(
397 app_settings_name = Column(
398 "app_settings_name", String(255), nullable=True, unique=None,
398 "app_settings_name", String(255), nullable=True, unique=None,
399 default=None)
399 default=None)
400 _app_settings_value = Column(
400 _app_settings_value = Column(
401 "app_settings_value", String(4096), nullable=True, unique=None,
401 "app_settings_value", String(4096), nullable=True, unique=None,
402 default=None)
402 default=None)
403 _app_settings_type = Column(
403 _app_settings_type = Column(
404 "app_settings_type", String(255), nullable=True, unique=None,
404 "app_settings_type", String(255), nullable=True, unique=None,
405 default=None)
405 default=None)
406
406
407 repository = relationship('Repository')
407 repository = relationship('Repository')
408
408
409 def __init__(self, repository_id, key='', val='', type='unicode'):
409 def __init__(self, repository_id, key='', val='', type='unicode'):
410 self.repository_id = repository_id
410 self.repository_id = repository_id
411 self.app_settings_name = key
411 self.app_settings_name = key
412 self.app_settings_type = type
412 self.app_settings_type = type
413 self.app_settings_value = val
413 self.app_settings_value = val
414
414
415 @validates('_app_settings_value')
415 @validates('_app_settings_value')
416 def validate_settings_value(self, key, val):
416 def validate_settings_value(self, key, val):
417 assert type(val) == unicode
417 assert type(val) == unicode
418 return val
418 return val
419
419
420 @hybrid_property
420 @hybrid_property
421 def app_settings_value(self):
421 def app_settings_value(self):
422 v = self._app_settings_value
422 v = self._app_settings_value
423 type_ = self.app_settings_type
423 type_ = self.app_settings_type
424 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
424 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
425 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
425 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
426 return converter(v)
426 return converter(v)
427
427
428 @app_settings_value.setter
428 @app_settings_value.setter
429 def app_settings_value(self, val):
429 def app_settings_value(self, val):
430 """
430 """
431 Setter that will always make sure we use unicode in app_settings_value
431 Setter that will always make sure we use unicode in app_settings_value
432
432
433 :param val:
433 :param val:
434 """
434 """
435 self._app_settings_value = safe_unicode(val)
435 self._app_settings_value = safe_unicode(val)
436
436
437 @hybrid_property
437 @hybrid_property
438 def app_settings_type(self):
438 def app_settings_type(self):
439 return self._app_settings_type
439 return self._app_settings_type
440
440
441 @app_settings_type.setter
441 @app_settings_type.setter
442 def app_settings_type(self, val):
442 def app_settings_type(self, val):
443 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
443 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
444 if val not in SETTINGS_TYPES:
444 if val not in SETTINGS_TYPES:
445 raise Exception('type must be one of %s got %s'
445 raise Exception('type must be one of %s got %s'
446 % (SETTINGS_TYPES.keys(), val))
446 % (SETTINGS_TYPES.keys(), val))
447 self._app_settings_type = val
447 self._app_settings_type = val
448
448
449 def __unicode__(self):
449 def __unicode__(self):
450 return u"<%s('%s:%s:%s[%s]')>" % (
450 return u"<%s('%s:%s:%s[%s]')>" % (
451 self.__class__.__name__, self.repository.repo_name,
451 self.__class__.__name__, self.repository.repo_name,
452 self.app_settings_name, self.app_settings_value,
452 self.app_settings_name, self.app_settings_value,
453 self.app_settings_type
453 self.app_settings_type
454 )
454 )
455
455
456
456
457 class RepoRhodeCodeUi(Base, BaseModel):
457 class RepoRhodeCodeUi(Base, BaseModel):
458 __tablename__ = 'repo_rhodecode_ui'
458 __tablename__ = 'repo_rhodecode_ui'
459 __table_args__ = (
459 __table_args__ = (
460 UniqueConstraint(
460 UniqueConstraint(
461 'repository_id', 'ui_section', 'ui_key',
461 'repository_id', 'ui_section', 'ui_key',
462 name='uq_repo_rhodecode_ui_repository_id_section_key'),
462 name='uq_repo_rhodecode_ui_repository_id_section_key'),
463 {'extend_existing': True, 'mysql_engine': 'InnoDB',
463 {'extend_existing': True, 'mysql_engine': 'InnoDB',
464 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
464 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
465 )
465 )
466
466
467 repository_id = Column(
467 repository_id = Column(
468 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
468 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
469 nullable=False)
469 nullable=False)
470 ui_id = Column(
470 ui_id = Column(
471 "ui_id", Integer(), nullable=False, unique=True, default=None,
471 "ui_id", Integer(), nullable=False, unique=True, default=None,
472 primary_key=True)
472 primary_key=True)
473 ui_section = Column(
473 ui_section = Column(
474 "ui_section", String(255), nullable=True, unique=None, default=None)
474 "ui_section", String(255), nullable=True, unique=None, default=None)
475 ui_key = Column(
475 ui_key = Column(
476 "ui_key", String(255), nullable=True, unique=None, default=None)
476 "ui_key", String(255), nullable=True, unique=None, default=None)
477 ui_value = Column(
477 ui_value = Column(
478 "ui_value", String(255), nullable=True, unique=None, default=None)
478 "ui_value", String(255), nullable=True, unique=None, default=None)
479 ui_active = Column(
479 ui_active = Column(
480 "ui_active", Boolean(), nullable=True, unique=None, default=True)
480 "ui_active", Boolean(), nullable=True, unique=None, default=True)
481
481
482 repository = relationship('Repository')
482 repository = relationship('Repository')
483
483
484 def __repr__(self):
484 def __repr__(self):
485 return '<%s[%s:%s]%s=>%s>' % (
485 return '<%s[%s:%s]%s=>%s>' % (
486 self.__class__.__name__, self.repository.repo_name,
486 self.__class__.__name__, self.repository.repo_name,
487 self.ui_section, self.ui_key, self.ui_value)
487 self.ui_section, self.ui_key, self.ui_value)
488
488
489
489
490 class User(Base, BaseModel):
490 class User(Base, BaseModel):
491 __tablename__ = 'users'
491 __tablename__ = 'users'
492 __table_args__ = (
492 __table_args__ = (
493 UniqueConstraint('username'), UniqueConstraint('email'),
493 UniqueConstraint('username'), UniqueConstraint('email'),
494 Index('u_username_idx', 'username'),
494 Index('u_username_idx', 'username'),
495 Index('u_email_idx', 'email'),
495 Index('u_email_idx', 'email'),
496 {'extend_existing': True, 'mysql_engine': 'InnoDB',
496 {'extend_existing': True, 'mysql_engine': 'InnoDB',
497 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
497 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
498 )
498 )
499 DEFAULT_USER = 'default'
499 DEFAULT_USER = 'default'
500 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
500 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
501 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
501 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
502
502
503 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
503 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
504 username = Column("username", String(255), nullable=True, unique=None, default=None)
504 username = Column("username", String(255), nullable=True, unique=None, default=None)
505 password = Column("password", String(255), nullable=True, unique=None, default=None)
505 password = Column("password", String(255), nullable=True, unique=None, default=None)
506 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
506 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
507 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
507 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
508 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
508 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
509 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
509 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
510 _email = Column("email", String(255), nullable=True, unique=None, default=None)
510 _email = Column("email", String(255), nullable=True, unique=None, default=None)
511 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
511 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
512 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
512 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
513 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
513 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
514 api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
514 api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
515 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
515 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
516 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
516 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
517 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
517 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
518
518
519 user_log = relationship('UserLog')
519 user_log = relationship('UserLog')
520 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
520 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
521
521
522 repositories = relationship('Repository')
522 repositories = relationship('Repository')
523 repository_groups = relationship('RepoGroup')
523 repository_groups = relationship('RepoGroup')
524 user_groups = relationship('UserGroup')
524 user_groups = relationship('UserGroup')
525
525
526 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
526 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
527 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
527 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
528
528
529 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
529 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
530 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
530 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
531 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
531 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
532
532
533 group_member = relationship('UserGroupMember', cascade='all')
533 group_member = relationship('UserGroupMember', cascade='all')
534
534
535 notifications = relationship('UserNotification', cascade='all')
535 notifications = relationship('UserNotification', cascade='all')
536 # notifications assigned to this user
536 # notifications assigned to this user
537 user_created_notifications = relationship('Notification', cascade='all')
537 user_created_notifications = relationship('Notification', cascade='all')
538 # comments created by this user
538 # comments created by this user
539 user_comments = relationship('ChangesetComment', cascade='all')
539 user_comments = relationship('ChangesetComment', cascade='all')
540 # user profile extra info
540 # user profile extra info
541 user_emails = relationship('UserEmailMap', cascade='all')
541 user_emails = relationship('UserEmailMap', cascade='all')
542 user_ip_map = relationship('UserIpMap', cascade='all')
542 user_ip_map = relationship('UserIpMap', cascade='all')
543 user_auth_tokens = relationship('UserApiKeys', cascade='all')
543 user_auth_tokens = relationship('UserApiKeys', cascade='all')
544 # gists
544 # gists
545 user_gists = relationship('Gist', cascade='all')
545 user_gists = relationship('Gist', cascade='all')
546 # user pull requests
546 # user pull requests
547 user_pull_requests = relationship('PullRequest', cascade='all')
547 user_pull_requests = relationship('PullRequest', cascade='all')
548 # external identities
548 # external identities
549 extenal_identities = relationship(
549 extenal_identities = relationship(
550 'ExternalIdentity',
550 'ExternalIdentity',
551 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
551 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
552 cascade='all')
552 cascade='all')
553
553
554 def __unicode__(self):
554 def __unicode__(self):
555 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
555 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
556 self.user_id, self.username)
556 self.user_id, self.username)
557
557
558 @hybrid_property
558 @hybrid_property
559 def email(self):
559 def email(self):
560 return self._email
560 return self._email
561
561
562 @email.setter
562 @email.setter
563 def email(self, val):
563 def email(self, val):
564 self._email = val.lower() if val else None
564 self._email = val.lower() if val else None
565
565
566 @property
566 @property
567 def firstname(self):
567 def firstname(self):
568 # alias for future
568 # alias for future
569 return self.name
569 return self.name
570
570
571 @property
571 @property
572 def emails(self):
572 def emails(self):
573 other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
573 other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
574 return [self.email] + [x.email for x in other]
574 return [self.email] + [x.email for x in other]
575
575
576 @property
576 @property
577 def auth_tokens(self):
577 def auth_tokens(self):
578 return [self.api_key] + [x.api_key for x in self.extra_auth_tokens]
578 return [self.api_key] + [x.api_key for x in self.extra_auth_tokens]
579
579
580 @property
580 @property
581 def extra_auth_tokens(self):
581 def extra_auth_tokens(self):
582 return UserApiKeys.query().filter(UserApiKeys.user == self).all()
582 return UserApiKeys.query().filter(UserApiKeys.user == self).all()
583
583
584 @property
584 @property
585 def feed_token(self):
585 def feed_token(self):
586 feed_tokens = UserApiKeys.query()\
586 feed_tokens = UserApiKeys.query()\
587 .filter(UserApiKeys.user == self)\
587 .filter(UserApiKeys.user == self)\
588 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
588 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
589 .all()
589 .all()
590 if feed_tokens:
590 if feed_tokens:
591 return feed_tokens[0].api_key
591 return feed_tokens[0].api_key
592 else:
592 else:
593 # use the main token so we don't end up with nothing...
593 # use the main token so we don't end up with nothing...
594 return self.api_key
594 return self.api_key
595
595
596 @classmethod
596 @classmethod
597 def extra_valid_auth_tokens(cls, user, role=None):
597 def extra_valid_auth_tokens(cls, user, role=None):
598 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
598 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
599 .filter(or_(UserApiKeys.expires == -1,
599 .filter(or_(UserApiKeys.expires == -1,
600 UserApiKeys.expires >= time.time()))
600 UserApiKeys.expires >= time.time()))
601 if role:
601 if role:
602 tokens = tokens.filter(or_(UserApiKeys.role == role,
602 tokens = tokens.filter(or_(UserApiKeys.role == role,
603 UserApiKeys.role == UserApiKeys.ROLE_ALL))
603 UserApiKeys.role == UserApiKeys.ROLE_ALL))
604 return tokens.all()
604 return tokens.all()
605
605
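# Usage sketch for the token helpers above ('admin' is a hypothetical username):
#
#     user = User.get_by_username('admin')
#     user.auth_tokens                              # main api_key plus extra tokens
#     User.extra_valid_auth_tokens(
#         user, role=UserApiKeys.ROLE_FEED)         # only unexpired feed/all-role tokens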
606 @property
606 @property
607 def ip_addresses(self):
607 def ip_addresses(self):
608 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
608 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
609 return [x.ip_addr for x in ret]
609 return [x.ip_addr for x in ret]
610
610
611 @property
611 @property
612 def username_and_name(self):
612 def username_and_name(self):
613 return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
613 return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
614
614
615 @property
615 @property
616 def username_or_name_or_email(self):
616 def username_or_name_or_email(self):
617 full_name = self.full_name if self.full_name != ' ' else None
617 full_name = self.full_name if self.full_name != ' ' else None
618 return self.username or full_name or self.email
618 return self.username or full_name or self.email
619
619
620 @property
620 @property
621 def full_name(self):
621 def full_name(self):
622 return '%s %s' % (self.firstname, self.lastname)
622 return '%s %s' % (self.firstname, self.lastname)
623
623
624 @property
624 @property
625 def full_name_or_username(self):
625 def full_name_or_username(self):
626 return ('%s %s' % (self.firstname, self.lastname)
626 return ('%s %s' % (self.firstname, self.lastname)
627 if (self.firstname and self.lastname) else self.username)
627 if (self.firstname and self.lastname) else self.username)
628
628
629 @property
629 @property
630 def full_contact(self):
630 def full_contact(self):
631 return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
631 return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
632
632
633 @property
633 @property
634 def short_contact(self):
634 def short_contact(self):
635 return '%s %s' % (self.firstname, self.lastname)
635 return '%s %s' % (self.firstname, self.lastname)
636
636
637 @property
637 @property
638 def is_admin(self):
638 def is_admin(self):
639 return self.admin
639 return self.admin
640
640
641 @property
641 @property
642 def AuthUser(self):
642 def AuthUser(self):
643 """
643 """
644 Returns instance of AuthUser for this user
644 Returns instance of AuthUser for this user
645 """
645 """
646 from rhodecode.lib.auth import AuthUser
646 from rhodecode.lib.auth import AuthUser
647 return AuthUser(user_id=self.user_id, api_key=self.api_key,
647 return AuthUser(user_id=self.user_id, api_key=self.api_key,
648 username=self.username)
648 username=self.username)
649
649
650 @hybrid_property
650 @hybrid_property
651 def user_data(self):
651 def user_data(self):
652 if not self._user_data:
652 if not self._user_data:
653 return {}
653 return {}
654
654
655 try:
655 try:
656 return json.loads(self._user_data)
656 return json.loads(self._user_data)
657 except TypeError:
657 except TypeError:
658 return {}
658 return {}
659
659
660 @user_data.setter
660 @user_data.setter
661 def user_data(self, val):
661 def user_data(self, val):
662 if not isinstance(val, dict):
662 if not isinstance(val, dict):
663 raise Exception('user_data must be dict, got %s' % type(val))
663 raise Exception('user_data must be dict, got %s' % type(val))
664 try:
664 try:
665 self._user_data = json.dumps(val)
665 self._user_data = json.dumps(val)
666 except Exception:
666 except Exception:
667 log.error(traceback.format_exc())
667 log.error(traceback.format_exc())
668
668
669 @classmethod
669 @classmethod
670 def get_by_username(cls, username, case_insensitive=False,
670 def get_by_username(cls, username, case_insensitive=False,
671 cache=False, identity_cache=False):
671 cache=False, identity_cache=False):
672 session = Session()
672 session = Session()
673
673
674 if case_insensitive:
674 if case_insensitive:
675 q = cls.query().filter(
675 q = cls.query().filter(
676 func.lower(cls.username) == func.lower(username))
676 func.lower(cls.username) == func.lower(username))
677 else:
677 else:
678 q = cls.query().filter(cls.username == username)
678 q = cls.query().filter(cls.username == username)
679
679
680 if cache:
680 if cache:
681 if identity_cache:
681 if identity_cache:
682 val = cls.identity_cache(session, 'username', username)
682 val = cls.identity_cache(session, 'username', username)
683 if val:
683 if val:
684 return val
684 return val
685 else:
685 else:
686 q = q.options(
686 q = q.options(
687 FromCache("sql_cache_short",
687 FromCache("sql_cache_short",
688 "get_user_by_name_%s" % _hash_key(username)))
688 "get_user_by_name_%s" % _hash_key(username)))
689
689
690 return q.scalar()
690 return q.scalar()
691
691
692 @classmethod
692 @classmethod
693 def get_by_auth_token(cls, auth_token, cache=False, fallback=True):
693 def get_by_auth_token(cls, auth_token, cache=False, fallback=True):
694 q = cls.query().filter(cls.api_key == auth_token)
694 q = cls.query().filter(cls.api_key == auth_token)
695
695
696 if cache:
696 if cache:
697 q = q.options(FromCache("sql_cache_short",
697 q = q.options(FromCache("sql_cache_short",
698 "get_auth_token_%s" % auth_token))
698 "get_auth_token_%s" % auth_token))
699 res = q.scalar()
699 res = q.scalar()
700
700
701 if fallback and not res:
701 if fallback and not res:
702 # fallback to additional keys
702 # fallback to additional keys
703 _res = UserApiKeys.query()\
703 _res = UserApiKeys.query()\
704 .filter(UserApiKeys.api_key == auth_token)\
704 .filter(UserApiKeys.api_key == auth_token)\
705 .filter(or_(UserApiKeys.expires == -1,
705 .filter(or_(UserApiKeys.expires == -1,
706 UserApiKeys.expires >= time.time()))\
706 UserApiKeys.expires >= time.time()))\
707 .first()
707 .first()
708 if _res:
708 if _res:
709 res = _res.user
709 res = _res.user
710 return res
710 return res
711
711
712 @classmethod
712 @classmethod
713 def get_by_email(cls, email, case_insensitive=False, cache=False):
713 def get_by_email(cls, email, case_insensitive=False, cache=False):
714
714
715 if case_insensitive:
715 if case_insensitive:
716 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
716 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
717
717
718 else:
718 else:
719 q = cls.query().filter(cls.email == email)
719 q = cls.query().filter(cls.email == email)
720
720
721 if cache:
721 if cache:
722 q = q.options(FromCache("sql_cache_short",
722 q = q.options(FromCache("sql_cache_short",
723 "get_email_key_%s" % email))
723 "get_email_key_%s" % email))
724
724
725 ret = q.scalar()
725 ret = q.scalar()
726 if ret is None:
726 if ret is None:
727 q = UserEmailMap.query()
727 q = UserEmailMap.query()
728 # try fetching in alternate email map
728 # try fetching in alternate email map
729 if case_insensitive:
729 if case_insensitive:
730 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
730 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
731 else:
731 else:
732 q = q.filter(UserEmailMap.email == email)
732 q = q.filter(UserEmailMap.email == email)
733 q = q.options(joinedload(UserEmailMap.user))
733 q = q.options(joinedload(UserEmailMap.user))
734 if cache:
734 if cache:
735 q = q.options(FromCache("sql_cache_short",
735 q = q.options(FromCache("sql_cache_short",
736 "get_email_map_key_%s" % email))
736 "get_email_map_key_%s" % email))
737 ret = getattr(q.scalar(), 'user', None)
737 ret = getattr(q.scalar(), 'user', None)
738
738
739 return ret
739 return ret
740
740
741 @classmethod
741 @classmethod
742 def get_from_cs_author(cls, author):
742 def get_from_cs_author(cls, author):
743 """
743 """
744 Tries to get User objects out of commit author string
744 Tries to get User objects out of commit author string
745
745
746 :param author:
746 :param author:
747 """
747 """
748 from rhodecode.lib.helpers import email, author_name
748 from rhodecode.lib.helpers import email, author_name
749 # if a valid email is present in the author string, see if that user exists in the system
749 # if a valid email is present in the author string, see if that user exists in the system
750 _email = email(author)
750 _email = email(author)
751 if _email:
751 if _email:
752 user = cls.get_by_email(_email, case_insensitive=True)
752 user = cls.get_by_email(_email, case_insensitive=True)
753 if user:
753 if user:
754 return user
754 return user
755 # Maybe we can match by username?
755 # Maybe we can match by username?
756 _author = author_name(author)
756 _author = author_name(author)
757 user = cls.get_by_username(_author, case_insensitive=True)
757 user = cls.get_by_username(_author, case_insensitive=True)
758 if user:
758 if user:
759 return user
759 return user
760
760
761 def update_userdata(self, **kwargs):
761 def update_userdata(self, **kwargs):
762 usr = self
762 usr = self
763 old = usr.user_data
763 old = usr.user_data
764 old.update(**kwargs)
764 old.update(**kwargs)
765 usr.user_data = old
765 usr.user_data = old
766 Session().add(usr)
766 Session().add(usr)
767 log.debug('updated userdata with %s', kwargs)
767 log.debug('updated userdata with %s', kwargs)
768
768
769 def update_lastlogin(self):
769 def update_lastlogin(self):
770 """Update user lastlogin"""
770 """Update user lastlogin"""
771 self.last_login = datetime.datetime.now()
771 self.last_login = datetime.datetime.now()
772 Session().add(self)
772 Session().add(self)
773 log.debug('updated user %s lastlogin', self.username)
773 log.debug('updated user %s lastlogin', self.username)
774
774
775 def update_lastactivity(self):
775 def update_lastactivity(self):
776 """Update user lastactivity"""
776 """Update user lastactivity"""
777 usr = self
777 usr = self
778 old = usr.user_data
778 old = usr.user_data
779 old.update({'last_activity': time.time()})
779 old.update({'last_activity': time.time()})
780 usr.user_data = old
780 usr.user_data = old
781 Session().add(usr)
781 Session().add(usr)
782 log.debug('updated user %s lastactivity', usr.username)
782 log.debug('updated user %s lastactivity', usr.username)
783
783
784 def update_password(self, new_password, change_api_key=False):
784 def update_password(self, new_password, change_api_key=False):
785 from rhodecode.lib.auth import get_crypt_password,generate_auth_token
785 from rhodecode.lib.auth import get_crypt_password,generate_auth_token
786
786
787 self.password = get_crypt_password(new_password)
787 self.password = get_crypt_password(new_password)
788 if change_api_key:
788 if change_api_key:
789 self.api_key = generate_auth_token(self.username)
789 self.api_key = generate_auth_token(self.username)
790 Session().add(self)
790 Session().add(self)
791
791
792 @classmethod
792 @classmethod
793 def get_first_super_admin(cls):
793 def get_first_super_admin(cls):
794 user = User.query().filter(User.admin == true()).first()
794 user = User.query().filter(User.admin == true()).first()
795 if user is None:
795 if user is None:
796 raise Exception('FATAL: Missing administrative account!')
796 raise Exception('FATAL: Missing administrative account!')
797 return user
797 return user
798
798
799 @classmethod
799 @classmethod
800 def get_all_super_admins(cls):
800 def get_all_super_admins(cls):
801 """
801 """
802 Returns all admin accounts sorted by username
802 Returns all admin accounts sorted by username
803 """
803 """
804 return User.query().filter(User.admin == true())\
804 return User.query().filter(User.admin == true())\
805 .order_by(User.username.asc()).all()
805 .order_by(User.username.asc()).all()
806
806
807 @classmethod
807 @classmethod
808 def get_default_user(cls, cache=False):
808 def get_default_user(cls, cache=False):
809 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
809 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
810 if user is None:
810 if user is None:
811 raise Exception('FATAL: Missing default account!')
811 raise Exception('FATAL: Missing default account!')
812 return user
812 return user
813
813
814 def _get_default_perms(self, user, suffix=''):
814 def _get_default_perms(self, user, suffix=''):
815 from rhodecode.model.permission import PermissionModel
815 from rhodecode.model.permission import PermissionModel
816 return PermissionModel().get_default_perms(user.user_perms, suffix)
816 return PermissionModel().get_default_perms(user.user_perms, suffix)
817
817
818 def get_default_perms(self, suffix=''):
818 def get_default_perms(self, suffix=''):
819 return self._get_default_perms(self, suffix)
819 return self._get_default_perms(self, suffix)
820
820
821 def get_api_data(self, include_secrets=False, details='full'):
821 def get_api_data(self, include_secrets=False, details='full'):
822 """
822 """
823 Common function for generating user related data for API
823 Common function for generating user related data for API
824
824
825 :param include_secrets: By default secrets in the API data will be replaced
825 :param include_secrets: By default secrets in the API data will be replaced
826 by a placeholder value to prevent exposing this data by accident. In case
826 by a placeholder value to prevent exposing this data by accident. In case
827 this data shall be exposed, set this flag to ``True``.
827 this data shall be exposed, set this flag to ``True``.
828
828
829 :param details: can be 'basic' or 'full'; 'basic' gives only a subset of
829 :param details: can be 'basic' or 'full'; 'basic' gives only a subset of
830 the available user information: user_id, names and emails.
830 the available user information: user_id, names and emails.
831 """
831 """
832 user = self
832 user = self
833 user_data = self.user_data
833 user_data = self.user_data
834 data = {
834 data = {
835 'user_id': user.user_id,
835 'user_id': user.user_id,
836 'username': user.username,
836 'username': user.username,
837 'firstname': user.name,
837 'firstname': user.name,
838 'lastname': user.lastname,
838 'lastname': user.lastname,
839 'email': user.email,
839 'email': user.email,
840 'emails': user.emails,
840 'emails': user.emails,
841 }
841 }
842 if details == 'basic':
842 if details == 'basic':
843 return data
843 return data
844
844
845 api_key_length = 40
845 api_key_length = 40
846 api_key_replacement = '*' * api_key_length
846 api_key_replacement = '*' * api_key_length
847
847
848 extras = {
848 extras = {
849 'api_key': api_key_replacement,
849 'api_key': api_key_replacement,
850 'api_keys': [api_key_replacement],
850 'api_keys': [api_key_replacement],
851 'active': user.active,
851 'active': user.active,
852 'admin': user.admin,
852 'admin': user.admin,
853 'extern_type': user.extern_type,
853 'extern_type': user.extern_type,
854 'extern_name': user.extern_name,
854 'extern_name': user.extern_name,
855 'last_login': user.last_login,
855 'last_login': user.last_login,
856 'ip_addresses': user.ip_addresses,
856 'ip_addresses': user.ip_addresses,
857 'language': user_data.get('language')
857 'language': user_data.get('language')
858 }
858 }
859 data.update(extras)
859 data.update(extras)
860
860
861 if include_secrets:
861 if include_secrets:
862 data['api_key'] = user.api_key
862 data['api_key'] = user.api_key
863 data['api_keys'] = user.auth_tokens
863 data['api_keys'] = user.auth_tokens
864 return data
864 return data
865
865
866 def __json__(self):
866 def __json__(self):
867 data = {
867 data = {
868 'full_name': self.full_name,
868 'full_name': self.full_name,
869 'full_name_or_username': self.full_name_or_username,
869 'full_name_or_username': self.full_name_or_username,
870 'short_contact': self.short_contact,
870 'short_contact': self.short_contact,
871 'full_contact': self.full_contact,
871 'full_contact': self.full_contact,
872 }
872 }
873 data.update(self.get_api_data())
873 data.update(self.get_api_data())
874 return data
874 return data
875
875
876
876
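# Hedged illustration of the API serialization helpers above ('admin' is a
# hypothetical username):
#
#     user = User.get_by_username('admin')
#     user.get_api_data(details='basic')       # user_id, username, names, emails only
#     user.get_api_data()                      # full data with api keys masked by '*'
#     user.get_api_data(include_secrets=True)  # full data including the real tokens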
877 class UserApiKeys(Base, BaseModel):
877 class UserApiKeys(Base, BaseModel):
878 __tablename__ = 'user_api_keys'
878 __tablename__ = 'user_api_keys'
879 __table_args__ = (
879 __table_args__ = (
880 Index('uak_api_key_idx', 'api_key'),
880 Index('uak_api_key_idx', 'api_key'),
881 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
881 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
882 UniqueConstraint('api_key'),
882 UniqueConstraint('api_key'),
883 {'extend_existing': True, 'mysql_engine': 'InnoDB',
883 {'extend_existing': True, 'mysql_engine': 'InnoDB',
884 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
884 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
885 )
885 )
886 __mapper_args__ = {}
886 __mapper_args__ = {}
887
887
888 # ApiKey role
888 # ApiKey role
889 ROLE_ALL = 'token_role_all'
889 ROLE_ALL = 'token_role_all'
890 ROLE_HTTP = 'token_role_http'
890 ROLE_HTTP = 'token_role_http'
891 ROLE_VCS = 'token_role_vcs'
891 ROLE_VCS = 'token_role_vcs'
892 ROLE_API = 'token_role_api'
892 ROLE_API = 'token_role_api'
893 ROLE_FEED = 'token_role_feed'
893 ROLE_FEED = 'token_role_feed'
894 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
894 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
895
895
896 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
896 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
897 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
897 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
898 api_key = Column("api_key", String(255), nullable=False, unique=True)
898 api_key = Column("api_key", String(255), nullable=False, unique=True)
899 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
899 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
900 expires = Column('expires', Float(53), nullable=False)
900 expires = Column('expires', Float(53), nullable=False)
901 role = Column('role', String(255), nullable=True)
901 role = Column('role', String(255), nullable=True)
902 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
902 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
903
903
904 user = relationship('User', lazy='joined')
904 user = relationship('User', lazy='joined')
905
905
906 @classmethod
906 @classmethod
907 def _get_role_name(cls, role):
907 def _get_role_name(cls, role):
908 return {
908 return {
909 cls.ROLE_ALL: _('all'),
909 cls.ROLE_ALL: _('all'),
910 cls.ROLE_HTTP: _('http/web interface'),
910 cls.ROLE_HTTP: _('http/web interface'),
911 cls.ROLE_VCS: _('vcs (git/hg protocol)'),
911 cls.ROLE_VCS: _('vcs (git/hg protocol)'),
912 cls.ROLE_API: _('api calls'),
912 cls.ROLE_API: _('api calls'),
913 cls.ROLE_FEED: _('feed access'),
913 cls.ROLE_FEED: _('feed access'),
914 }.get(role, role)
914 }.get(role, role)
915
915
916 @property
916 @property
917 def expired(self):
917 def expired(self):
918 if self.expires == -1:
918 if self.expires == -1:
919 return False
919 return False
920 return time.time() > self.expires
920 return time.time() > self.expires
921
921
922 @property
922 @property
923 def role_humanized(self):
923 def role_humanized(self):
924 return self._get_role_name(self.role)
924 return self._get_role_name(self.role)
925
925
926
926
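# Sketch of the expiry convention used by UserApiKeys.expired above; the token
# values are illustrative.
#
#     import time
#     token = UserApiKeys(api_key='...', expires=-1)   # -1 means the token never expires
#     token.expired                                    # -> False
#     token.expires = time.time() - 3600               # expiry timestamp in the past
#     token.expired                                    # -> True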
927 class UserEmailMap(Base, BaseModel):
927 class UserEmailMap(Base, BaseModel):
928 __tablename__ = 'user_email_map'
928 __tablename__ = 'user_email_map'
929 __table_args__ = (
929 __table_args__ = (
930 Index('uem_email_idx', 'email'),
930 Index('uem_email_idx', 'email'),
931 UniqueConstraint('email'),
931 UniqueConstraint('email'),
932 {'extend_existing': True, 'mysql_engine': 'InnoDB',
932 {'extend_existing': True, 'mysql_engine': 'InnoDB',
933 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
933 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
934 )
934 )
935 __mapper_args__ = {}
935 __mapper_args__ = {}
936
936
937 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
937 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
938 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
938 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
939 _email = Column("email", String(255), nullable=True, unique=False, default=None)
939 _email = Column("email", String(255), nullable=True, unique=False, default=None)
940 user = relationship('User', lazy='joined')
940 user = relationship('User', lazy='joined')
941
941
942 @validates('_email')
942 @validates('_email')
943 def validate_email(self, key, email):
943 def validate_email(self, key, email):
944 # check that this email is not the user's main one
944 # check that this email is not the user's main one
945 main_email = Session().query(User).filter(User.email == email).scalar()
945 main_email = Session().query(User).filter(User.email == email).scalar()
946 if main_email is not None:
946 if main_email is not None:
947 raise AttributeError('email %s is already present in the user table' % email)
947 raise AttributeError('email %s is already present in the user table' % email)
948 return email
948 return email
949
949
950 @hybrid_property
950 @hybrid_property
951 def email(self):
951 def email(self):
952 return self._email
952 return self._email
953
953
954 @email.setter
954 @email.setter
955 def email(self, val):
955 def email(self, val):
956 self._email = val.lower() if val else None
956 self._email = val.lower() if val else None
957
957
958
958
959 class UserIpMap(Base, BaseModel):
959 class UserIpMap(Base, BaseModel):
960 __tablename__ = 'user_ip_map'
960 __tablename__ = 'user_ip_map'
961 __table_args__ = (
961 __table_args__ = (
962 UniqueConstraint('user_id', 'ip_addr'),
962 UniqueConstraint('user_id', 'ip_addr'),
963 {'extend_existing': True, 'mysql_engine': 'InnoDB',
963 {'extend_existing': True, 'mysql_engine': 'InnoDB',
964 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
964 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
965 )
965 )
966 __mapper_args__ = {}
966 __mapper_args__ = {}
967
967
968 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
968 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
969 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
969 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
970 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
970 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
971 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
971 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
972 description = Column("description", String(10000), nullable=True, unique=None, default=None)
972 description = Column("description", String(10000), nullable=True, unique=None, default=None)
973 user = relationship('User', lazy='joined')
973 user = relationship('User', lazy='joined')
974
974
975 @classmethod
975 @classmethod
976 def _get_ip_range(cls, ip_addr):
976 def _get_ip_range(cls, ip_addr):
977 net = ipaddress.ip_network(ip_addr, strict=False)
977 net = ipaddress.ip_network(ip_addr, strict=False)
978 return [str(net.network_address), str(net.broadcast_address)]
978 return [str(net.network_address), str(net.broadcast_address)]
979
979
980 def __json__(self):
980 def __json__(self):
981 return {
981 return {
982 'ip_addr': self.ip_addr,
982 'ip_addr': self.ip_addr,
983 'ip_range': self._get_ip_range(self.ip_addr),
983 'ip_range': self._get_ip_range(self.ip_addr),
984 }
984 }
985
985
986 def __unicode__(self):
986 def __unicode__(self):
987 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
987 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
988 self.user_id, self.ip_addr)
988 self.user_id, self.ip_addr)
989
989
990 class UserLog(Base, BaseModel):
990 class UserLog(Base, BaseModel):
991 __tablename__ = 'user_logs'
991 __tablename__ = 'user_logs'
992 __table_args__ = (
992 __table_args__ = (
993 {'extend_existing': True, 'mysql_engine': 'InnoDB',
993 {'extend_existing': True, 'mysql_engine': 'InnoDB',
994 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
994 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
995 )
995 )
996 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
996 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
997 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
997 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
998 username = Column("username", String(255), nullable=True, unique=None, default=None)
998 username = Column("username", String(255), nullable=True, unique=None, default=None)
999 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
999 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
1000 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1000 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1001 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1001 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1002 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1002 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1003 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1003 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1004
1004
1005 def __unicode__(self):
1005 def __unicode__(self):
1006 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1006 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1007 self.repository_name,
1007 self.repository_name,
1008 self.action)
1008 self.action)
1009
1009
1010 @property
1010 @property
1011 def action_as_day(self):
1011 def action_as_day(self):
1012 return datetime.date(*self.action_date.timetuple()[:3])
1012 return datetime.date(*self.action_date.timetuple()[:3])
1013
1013
1014 user = relationship('User')
1014 user = relationship('User')
1015 repository = relationship('Repository', cascade='')
1015 repository = relationship('Repository', cascade='')
1016
1016
1017
1017
1018 class UserGroup(Base, BaseModel):
1018 class UserGroup(Base, BaseModel):
1019 __tablename__ = 'users_groups'
1019 __tablename__ = 'users_groups'
1020 __table_args__ = (
1020 __table_args__ = (
1021 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1021 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1022 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1022 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1023 )
1023 )
1024
1024
1025 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1025 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1026 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1026 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1027 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1027 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1028 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1028 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1029 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1029 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1030 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1030 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1031 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1031 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1032 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1032 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1033
1033
1034 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1034 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1035 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1035 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1036 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1036 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1037 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1037 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1038 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1038 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1039 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1039 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1040
1040
1041 user = relationship('User')
1041 user = relationship('User')
1042
1042
1043 @hybrid_property
1043 @hybrid_property
1044 def group_data(self):
1044 def group_data(self):
1045 if not self._group_data:
1045 if not self._group_data:
1046 return {}
1046 return {}
1047
1047
1048 try:
1048 try:
1049 return json.loads(self._group_data)
1049 return json.loads(self._group_data)
1050 except TypeError:
1050 except TypeError:
1051 return {}
1051 return {}
1052
1052
1053 @group_data.setter
1053 @group_data.setter
1054 def group_data(self, val):
1054 def group_data(self, val):
1055 try:
1055 try:
1056 self._group_data = json.dumps(val)
1056 self._group_data = json.dumps(val)
1057 except Exception:
1057 except Exception:
1058 log.error(traceback.format_exc())
1058 log.error(traceback.format_exc())
1059
1059
1060 def __unicode__(self):
1060 def __unicode__(self):
1061 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1061 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1062 self.users_group_id,
1062 self.users_group_id,
1063 self.users_group_name)
1063 self.users_group_name)
1064
1064
1065 @classmethod
1065 @classmethod
1066 def get_by_group_name(cls, group_name, cache=False,
1066 def get_by_group_name(cls, group_name, cache=False,
1067 case_insensitive=False):
1067 case_insensitive=False):
1068 if case_insensitive:
1068 if case_insensitive:
1069 q = cls.query().filter(func.lower(cls.users_group_name) ==
1069 q = cls.query().filter(func.lower(cls.users_group_name) ==
1070 func.lower(group_name))
1070 func.lower(group_name))
1071
1071
1072 else:
1072 else:
1073 q = cls.query().filter(cls.users_group_name == group_name)
1073 q = cls.query().filter(cls.users_group_name == group_name)
1074 if cache:
1074 if cache:
1075 q = q.options(FromCache(
1075 q = q.options(FromCache(
1076 "sql_cache_short",
1076 "sql_cache_short",
1077 "get_group_%s" % _hash_key(group_name)))
1077 "get_group_%s" % _hash_key(group_name)))
1078 return q.scalar()
1078 return q.scalar()
1079
1079
1080 @classmethod
1080 @classmethod
1081 def get(cls, user_group_id, cache=False):
1081 def get(cls, user_group_id, cache=False):
1082 user_group = cls.query()
1082 user_group = cls.query()
1083 if cache:
1083 if cache:
1084 user_group = user_group.options(FromCache("sql_cache_short",
1084 user_group = user_group.options(FromCache("sql_cache_short",
1085 "get_users_group_%s" % user_group_id))
1085 "get_users_group_%s" % user_group_id))
1086 return user_group.get(user_group_id)
1086 return user_group.get(user_group_id)
1087
1087
1088 def permissions(self, with_admins=True, with_owner=True):
1088 def permissions(self, with_admins=True, with_owner=True):
1089 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1089 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1090 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1090 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1091 joinedload(UserUserGroupToPerm.user),
1091 joinedload(UserUserGroupToPerm.user),
1092 joinedload(UserUserGroupToPerm.permission),)
1092 joinedload(UserUserGroupToPerm.permission),)
1093
1093
1094 # get owners, admins and their permissions. We rewrite the sqlalchemy
1094 # get owners, admins and their permissions. We rewrite the sqlalchemy
1095 # objects into plain AttributeDicts because the sqlalchemy session
1095 # objects into plain AttributeDicts because the sqlalchemy session
1096 # holds a global reference, so changing one object would propagate to
1096 # holds a global reference, so changing one object would propagate to
1097 # all others. If an admin is also the owner, a change to admin_row
1097 # all others. If an admin is also the owner, a change to admin_row
1098 # would otherwise affect both objects
1098 # would otherwise affect both objects
1099 perm_rows = []
1099 perm_rows = []
1100 for _usr in q.all():
1100 for _usr in q.all():
1101 usr = AttributeDict(_usr.user.get_dict())
1101 usr = AttributeDict(_usr.user.get_dict())
1102 usr.permission = _usr.permission.permission_name
1102 usr.permission = _usr.permission.permission_name
1103 perm_rows.append(usr)
1103 perm_rows.append(usr)
1104
1104
1105 # sort the perm rows so that the 'default' user comes first, then by
1105 # sort the perm rows so that the 'default' user comes first, then by
1106 # admin, write, read, none permission, and alphabetically within
1106 # admin, write, read, none permission, and alphabetically within
1107 # each group
1107 # each group
1108 perm_rows = sorted(perm_rows, key=display_sort)
1108 perm_rows = sorted(perm_rows, key=display_sort)
1109
1109
1110 _admin_perm = 'usergroup.admin'
1110 _admin_perm = 'usergroup.admin'
1111 owner_row = []
1111 owner_row = []
1112 if with_owner:
1112 if with_owner:
1113 usr = AttributeDict(self.user.get_dict())
1113 usr = AttributeDict(self.user.get_dict())
1114 usr.owner_row = True
1114 usr.owner_row = True
1115 usr.permission = _admin_perm
1115 usr.permission = _admin_perm
1116 owner_row.append(usr)
1116 owner_row.append(usr)
1117
1117
1118 super_admin_rows = []
1118 super_admin_rows = []
1119 if with_admins:
1119 if with_admins:
1120 for usr in User.get_all_super_admins():
1120 for usr in User.get_all_super_admins():
1121 # if this admin is also owner, don't double the record
1121 # if this admin is also owner, don't double the record
1122 if usr.user_id == owner_row[0].user_id:
1122 if usr.user_id == owner_row[0].user_id:
1123 owner_row[0].admin_row = True
1123 owner_row[0].admin_row = True
1124 else:
1124 else:
1125 usr = AttributeDict(usr.get_dict())
1125 usr = AttributeDict(usr.get_dict())
1126 usr.admin_row = True
1126 usr.admin_row = True
1127 usr.permission = _admin_perm
1127 usr.permission = _admin_perm
1128 super_admin_rows.append(usr)
1128 super_admin_rows.append(usr)
1129
1129
1130 return super_admin_rows + owner_row + perm_rows
1130 return super_admin_rows + owner_row + perm_rows
1131
1131
1132 def permission_user_groups(self):
1132 def permission_user_groups(self):
1133 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1133 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1134 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1134 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1135 joinedload(UserGroupUserGroupToPerm.target_user_group),
1135 joinedload(UserGroupUserGroupToPerm.target_user_group),
1136 joinedload(UserGroupUserGroupToPerm.permission),)
1136 joinedload(UserGroupUserGroupToPerm.permission),)
1137
1137
1138 perm_rows = []
1138 perm_rows = []
1139 for _user_group in q.all():
1139 for _user_group in q.all():
1140 usr = AttributeDict(_user_group.user_group.get_dict())
1140 usr = AttributeDict(_user_group.user_group.get_dict())
1141 usr.permission = _user_group.permission.permission_name
1141 usr.permission = _user_group.permission.permission_name
1142 perm_rows.append(usr)
1142 perm_rows.append(usr)
1143
1143
1144 return perm_rows
1144 return perm_rows
1145
1145
1146 def _get_default_perms(self, user_group, suffix=''):
1146 def _get_default_perms(self, user_group, suffix=''):
1147 from rhodecode.model.permission import PermissionModel
1147 from rhodecode.model.permission import PermissionModel
1148 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1148 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1149
1149
1150 def get_default_perms(self, suffix=''):
1150 def get_default_perms(self, suffix=''):
1151 return self._get_default_perms(self, suffix)
1151 return self._get_default_perms(self, suffix)
1152
1152
1153 def get_api_data(self, with_group_members=True, include_secrets=False):
1153 def get_api_data(self, with_group_members=True, include_secrets=False):
1154 """
1154 """
1155 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1155 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1156 basically forwarded.
1156 basically forwarded.
1157
1157
1158 """
1158 """
1159 user_group = self
1159 user_group = self
1160
1160
1161 data = {
1161 data = {
1162 'users_group_id': user_group.users_group_id,
1162 'users_group_id': user_group.users_group_id,
1163 'group_name': user_group.users_group_name,
1163 'group_name': user_group.users_group_name,
1164 'group_description': user_group.user_group_description,
1164 'group_description': user_group.user_group_description,
1165 'active': user_group.users_group_active,
1165 'active': user_group.users_group_active,
1166 'owner': user_group.user.username,
1166 'owner': user_group.user.username,
1167 }
1167 }
1168 if with_group_members:
1168 if with_group_members:
1169 users = []
1169 users = []
1170 for user in user_group.members:
1170 for user in user_group.members:
1171 user = user.user
1171 user = user.user
1172 users.append(user.get_api_data(include_secrets=include_secrets))
1172 users.append(user.get_api_data(include_secrets=include_secrets))
1173 data['users'] = users
1173 data['users'] = users
1174
1174
1175 return data
1175 return data
1176
1176
1177
1177
1178 class UserGroupMember(Base, BaseModel):
1178 class UserGroupMember(Base, BaseModel):
1179 __tablename__ = 'users_groups_members'
1179 __tablename__ = 'users_groups_members'
1180 __table_args__ = (
1180 __table_args__ = (
1181 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1181 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1182 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1182 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1183 )
1183 )
1184
1184
1185 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1185 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1186 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1186 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1187 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1187 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1188
1188
1189 user = relationship('User', lazy='joined')
1189 user = relationship('User', lazy='joined')
1190 users_group = relationship('UserGroup')
1190 users_group = relationship('UserGroup')
1191
1191
1192 def __init__(self, gr_id='', u_id=''):
1192 def __init__(self, gr_id='', u_id=''):
1193 self.users_group_id = gr_id
1193 self.users_group_id = gr_id
1194 self.user_id = u_id
1194 self.user_id = u_id
1195
1195
1196
1196
1197 class RepositoryField(Base, BaseModel):
1197 class RepositoryField(Base, BaseModel):
1198 __tablename__ = 'repositories_fields'
1198 __tablename__ = 'repositories_fields'
1199 __table_args__ = (
1199 __table_args__ = (
1200 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1200 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1201 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1201 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1202 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1202 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1203 )
1203 )
1204 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1204 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1205
1205
1206 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1206 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1207 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1207 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1208 field_key = Column("field_key", String(250))
1208 field_key = Column("field_key", String(250))
1209 field_label = Column("field_label", String(1024), nullable=False)
1209 field_label = Column("field_label", String(1024), nullable=False)
1210 field_value = Column("field_value", String(10000), nullable=False)
1210 field_value = Column("field_value", String(10000), nullable=False)
1211 field_desc = Column("field_desc", String(1024), nullable=False)
1211 field_desc = Column("field_desc", String(1024), nullable=False)
1212 field_type = Column("field_type", String(255), nullable=False, unique=None)
1212 field_type = Column("field_type", String(255), nullable=False, unique=None)
1213 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1213 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1214
1214
1215 repository = relationship('Repository')
1215 repository = relationship('Repository')
1216
1216
1217 @property
1217 @property
1218 def field_key_prefixed(self):
1218 def field_key_prefixed(self):
1219 return 'ex_%s' % self.field_key
1219 return 'ex_%s' % self.field_key
1220
1220
1221 @classmethod
1221 @classmethod
1222 def un_prefix_key(cls, key):
1222 def un_prefix_key(cls, key):
1223 if key.startswith(cls.PREFIX):
1223 if key.startswith(cls.PREFIX):
1224 return key[len(cls.PREFIX):]
1224 return key[len(cls.PREFIX):]
1225 return key
1225 return key
1226
1226
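To make the 'ex_' prefixing above concrete, here is a minimal standalone sketch of the round trip between a stored field key and its prefixed form representation; prefix_key and the 'ticket_system' key are illustrative and not part of this module.

PREFIX = 'ex_'  # same value as RepositoryField.PREFIX above

def prefix_key(key):
    # what field_key_prefixed produces for a stored field key
    return 'ex_%s' % key

def un_prefix_key(key):
    # mirrors RepositoryField.un_prefix_key
    if key.startswith(PREFIX):
        return key[len(PREFIX):]
    return key

assert prefix_key('ticket_system') == 'ex_ticket_system'
assert un_prefix_key('ex_ticket_system') == 'ticket_system'
assert un_prefix_key('ticket_system') == 'ticket_system'  # unprefixed keys pass through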
1227 @classmethod
1227 @classmethod
1228 def get_by_key_name(cls, key, repo):
1228 def get_by_key_name(cls, key, repo):
1229 row = cls.query()\
1229 row = cls.query()\
1230 .filter(cls.repository == repo)\
1230 .filter(cls.repository == repo)\
1231 .filter(cls.field_key == key).scalar()
1231 .filter(cls.field_key == key).scalar()
1232 return row
1232 return row
1233
1233
1234
1234
1235 class Repository(Base, BaseModel):
1235 class Repository(Base, BaseModel):
1236 __tablename__ = 'repositories'
1236 __tablename__ = 'repositories'
1237 __table_args__ = (
1237 __table_args__ = (
1238 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1238 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1239 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1239 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1240 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1240 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1241 )
1241 )
1242 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1242 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1243 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1243 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1244
1244
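Purely as an illustration of the clone URI templates above, a tiny sketch of how such a template could be filled in; the scheme, user, host and repository name are made up.

template = '{scheme}://{user}@{netloc}/{repo}'
clone_url = template.format(scheme='https', user='jane',
                            netloc='code.example.com', repo='group/my-repo')
assert clone_url == 'https://jane@code.example.com/group/my-repo'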
1245 STATE_CREATED = 'repo_state_created'
1245 STATE_CREATED = 'repo_state_created'
1246 STATE_PENDING = 'repo_state_pending'
1246 STATE_PENDING = 'repo_state_pending'
1247 STATE_ERROR = 'repo_state_error'
1247 STATE_ERROR = 'repo_state_error'
1248
1248
1249 LOCK_AUTOMATIC = 'lock_auto'
1249 LOCK_AUTOMATIC = 'lock_auto'
1250 LOCK_API = 'lock_api'
1250 LOCK_API = 'lock_api'
1251 LOCK_WEB = 'lock_web'
1251 LOCK_WEB = 'lock_web'
1252 LOCK_PULL = 'lock_pull'
1252 LOCK_PULL = 'lock_pull'
1253
1253
1254 NAME_SEP = URL_SEP
1254 NAME_SEP = URL_SEP
1255
1255
1256 repo_id = Column(
1256 repo_id = Column(
1257 "repo_id", Integer(), nullable=False, unique=True, default=None,
1257 "repo_id", Integer(), nullable=False, unique=True, default=None,
1258 primary_key=True)
1258 primary_key=True)
1259 _repo_name = Column(
1259 _repo_name = Column(
1260 "repo_name", Text(), nullable=False, default=None)
1260 "repo_name", Text(), nullable=False, default=None)
1261 _repo_name_hash = Column(
1261 _repo_name_hash = Column(
1262 "repo_name_hash", String(255), nullable=False, unique=True)
1262 "repo_name_hash", String(255), nullable=False, unique=True)
1263 repo_state = Column("repo_state", String(255), nullable=True)
1263 repo_state = Column("repo_state", String(255), nullable=True)
1264
1264
1265 clone_uri = Column(
1265 clone_uri = Column(
1266 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1266 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1267 default=None)
1267 default=None)
1268 repo_type = Column(
1268 repo_type = Column(
1269 "repo_type", String(255), nullable=False, unique=False, default=None)
1269 "repo_type", String(255), nullable=False, unique=False, default=None)
1270 user_id = Column(
1270 user_id = Column(
1271 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1271 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1272 unique=False, default=None)
1272 unique=False, default=None)
1273 private = Column(
1273 private = Column(
1274 "private", Boolean(), nullable=True, unique=None, default=None)
1274 "private", Boolean(), nullable=True, unique=None, default=None)
1275 enable_statistics = Column(
1275 enable_statistics = Column(
1276 "statistics", Boolean(), nullable=True, unique=None, default=True)
1276 "statistics", Boolean(), nullable=True, unique=None, default=True)
1277 enable_downloads = Column(
1277 enable_downloads = Column(
1278 "downloads", Boolean(), nullable=True, unique=None, default=True)
1278 "downloads", Boolean(), nullable=True, unique=None, default=True)
1279 description = Column(
1279 description = Column(
1280 "description", String(10000), nullable=True, unique=None, default=None)
1280 "description", String(10000), nullable=True, unique=None, default=None)
1281 created_on = Column(
1281 created_on = Column(
1282 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1282 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1283 default=datetime.datetime.now)
1283 default=datetime.datetime.now)
1284 updated_on = Column(
1284 updated_on = Column(
1285 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1285 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1286 default=datetime.datetime.now)
1286 default=datetime.datetime.now)
1287 _landing_revision = Column(
1287 _landing_revision = Column(
1288 "landing_revision", String(255), nullable=False, unique=False,
1288 "landing_revision", String(255), nullable=False, unique=False,
1289 default=None)
1289 default=None)
1290 enable_locking = Column(
1290 enable_locking = Column(
1291 "enable_locking", Boolean(), nullable=False, unique=None,
1291 "enable_locking", Boolean(), nullable=False, unique=None,
1292 default=False)
1292 default=False)
1293 _locked = Column(
1293 _locked = Column(
1294 "locked", String(255), nullable=True, unique=False, default=None)
1294 "locked", String(255), nullable=True, unique=False, default=None)
1295 _changeset_cache = Column(
1295 _changeset_cache = Column(
1296 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1296 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1297
1297
1298 fork_id = Column(
1298 fork_id = Column(
1299 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1299 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1300 nullable=True, unique=False, default=None)
1300 nullable=True, unique=False, default=None)
1301 group_id = Column(
1301 group_id = Column(
1302 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1302 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1303 unique=False, default=None)
1303 unique=False, default=None)
1304
1304
1305 user = relationship('User', lazy='joined')
1305 user = relationship('User', lazy='joined')
1306 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1306 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1307 group = relationship('RepoGroup', lazy='joined')
1307 group = relationship('RepoGroup', lazy='joined')
1308 repo_to_perm = relationship(
1308 repo_to_perm = relationship(
1309 'UserRepoToPerm', cascade='all',
1309 'UserRepoToPerm', cascade='all',
1310 order_by='UserRepoToPerm.repo_to_perm_id')
1310 order_by='UserRepoToPerm.repo_to_perm_id')
1311 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1311 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1312 stats = relationship('Statistics', cascade='all', uselist=False)
1312 stats = relationship('Statistics', cascade='all', uselist=False)
1313
1313
1314 followers = relationship(
1314 followers = relationship(
1315 'UserFollowing',
1315 'UserFollowing',
1316 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1316 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1317 cascade='all')
1317 cascade='all')
1318 extra_fields = relationship(
1318 extra_fields = relationship(
1319 'RepositoryField', cascade="all, delete, delete-orphan")
1319 'RepositoryField', cascade="all, delete, delete-orphan")
1320 logs = relationship('UserLog')
1320 logs = relationship('UserLog')
1321 comments = relationship(
1321 comments = relationship(
1322 'ChangesetComment', cascade="all, delete, delete-orphan")
1322 'ChangesetComment', cascade="all, delete, delete-orphan")
1323 pull_requests_source = relationship(
1323 pull_requests_source = relationship(
1324 'PullRequest',
1324 'PullRequest',
1325 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1325 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1326 cascade="all, delete, delete-orphan")
1326 cascade="all, delete, delete-orphan")
1327 pull_requests_target = relationship(
1327 pull_requests_target = relationship(
1328 'PullRequest',
1328 'PullRequest',
1329 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1329 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1330 cascade="all, delete, delete-orphan")
1330 cascade="all, delete, delete-orphan")
1331 ui = relationship('RepoRhodeCodeUi', cascade="all")
1331 ui = relationship('RepoRhodeCodeUi', cascade="all")
1332 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1332 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1333
1333
1334 def __unicode__(self):
1334 def __unicode__(self):
1335 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1335 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1336 safe_unicode(self.repo_name))
1336 safe_unicode(self.repo_name))
1337
1337
1338 @hybrid_property
1338 @hybrid_property
1339 def landing_rev(self):
1339 def landing_rev(self):
1340 # should always return [rev_type, rev]
1340 # should always return [rev_type, rev]
1341 if self._landing_revision:
1341 if self._landing_revision:
1342 _rev_info = self._landing_revision.split(':')
1342 _rev_info = self._landing_revision.split(':')
1343 if len(_rev_info) < 2:
1343 if len(_rev_info) < 2:
1344 _rev_info.insert(0, 'rev')
1344 _rev_info.insert(0, 'rev')
1345 return [_rev_info[0], _rev_info[1]]
1345 return [_rev_info[0], _rev_info[1]]
1346 return [None, None]
1346 return [None, None]
1347
1347
1348 @landing_rev.setter
1348 @landing_rev.setter
1349 def landing_rev(self, val):
1349 def landing_rev(self, val):
1350 if ':' not in val:
1350 if ':' not in val:
1351 raise ValueError('value must be delimited with `:` and consist '
1351 raise ValueError('value must be delimited with `:` and consist '
1352 'of <rev_type>:<rev>, got %s instead' % val)
1352 'of <rev_type>:<rev>, got %s instead' % val)
1353 self._landing_revision = val
1353 self._landing_revision = val
1354
1354
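Below is a minimal standalone sketch of the '<rev_type>:<rev>' format handled by the landing_rev getter and setter above; parse_landing_revision and the sample values are illustrative and not part of the module.

def parse_landing_revision(raw):
    # mirrors the getter above: always return [rev_type, rev]
    if raw:
        rev_info = raw.split(':')
        if len(rev_info) < 2:
            rev_info.insert(0, 'rev')
        return [rev_info[0], rev_info[1]]
    return [None, None]

assert parse_landing_revision('branch:default') == ['branch', 'default']
assert parse_landing_revision('tip') == ['rev', 'tip']  # bare value gets the 'rev' type
assert parse_landing_revision(None) == [None, None]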
1355 @hybrid_property
1355 @hybrid_property
1356 def locked(self):
1356 def locked(self):
1357 if self._locked:
1357 if self._locked:
1358 user_id, timelocked, reason = self._locked.split(':')
1358 user_id, timelocked, reason = self._locked.split(':')
1359 lock_values = int(user_id), timelocked, reason
1359 lock_values = int(user_id), timelocked, reason
1360 else:
1360 else:
1361 lock_values = [None, None, None]
1361 lock_values = [None, None, None]
1362 return lock_values
1362 return lock_values
1363
1363
1364 @locked.setter
1364 @locked.setter
1365 def locked(self, val):
1365 def locked(self, val):
1366 if val and isinstance(val, (list, tuple)):
1366 if val and isinstance(val, (list, tuple)):
1367 self._locked = ':'.join(map(str, val))
1367 self._locked = ':'.join(map(str, val))
1368 else:
1368 else:
1369 self._locked = None
1369 self._locked = None
1370
1370
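A small self-contained sketch of the lock string handled by the locked getter and setter above (user id, timestamp and reason joined with ':'); the helper names and values below are hypothetical.

import time

def encode_lock(user_id, lock_time, reason):
    # mirrors the setter: join the three values with ':'
    return ':'.join(map(str, [user_id, lock_time, reason]))

def decode_lock(raw):
    # mirrors the getter: split back into (int user_id, time, reason)
    if raw:
        user_id, timelocked, reason = raw.split(':')
        return int(user_id), timelocked, reason
    return [None, None, None]

raw = encode_lock(2, int(time.time()), 'lock_auto')
user_id, timelocked, reason = decode_lock(raw)
assert user_id == 2 and reason == 'lock_auto'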
1371 @hybrid_property
1371 @hybrid_property
1372 def changeset_cache(self):
1372 def changeset_cache(self):
1373 from rhodecode.lib.vcs.backends.base import EmptyCommit
1373 from rhodecode.lib.vcs.backends.base import EmptyCommit
1374 dummy = EmptyCommit().__json__()
1374 dummy = EmptyCommit().__json__()
1375 if not self._changeset_cache:
1375 if not self._changeset_cache:
1376 return dummy
1376 return dummy
1377 try:
1377 try:
1378 return json.loads(self._changeset_cache)
1378 return json.loads(self._changeset_cache)
1379 except TypeError:
1379 except TypeError:
1380 return dummy
1380 return dummy
1381 except Exception:
1381 except Exception:
1382 log.error(traceback.format_exc())
1382 log.error(traceback.format_exc())
1383 return dummy
1383 return dummy
1384
1384
1385 @changeset_cache.setter
1385 @changeset_cache.setter
1386 def changeset_cache(self, val):
1386 def changeset_cache(self, val):
1387 try:
1387 try:
1388 self._changeset_cache = json.dumps(val)
1388 self._changeset_cache = json.dumps(val)
1389 except Exception:
1389 except Exception:
1390 log.error(traceback.format_exc())
1390 log.error(traceback.format_exc())
1391
1391
1392 @hybrid_property
1392 @hybrid_property
1393 def repo_name(self):
1393 def repo_name(self):
1394 return self._repo_name
1394 return self._repo_name
1395
1395
1396 @repo_name.setter
1396 @repo_name.setter
1397 def repo_name(self, value):
1397 def repo_name(self, value):
1398 self._repo_name = value
1398 self._repo_name = value
1399 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1399 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1400
1400
1401 @classmethod
1401 @classmethod
1402 def normalize_repo_name(cls, repo_name):
1402 def normalize_repo_name(cls, repo_name):
1403 """
1403 """
1404 Normalizes an os-specific repo_name to the format stored internally in the
1404 Normalizes an os-specific repo_name to the format stored internally in the
1405 database, using URL_SEP
1405 database, using URL_SEP
1406
1406
1407 :param cls:
1407 :param cls:
1408 :param repo_name:
1408 :param repo_name:
1409 """
1409 """
1410 return cls.NAME_SEP.join(repo_name.split(os.sep))
1410 return cls.NAME_SEP.join(repo_name.split(os.sep))
1411
1411
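A brief runnable sketch of the same normalization, assuming URL_SEP is '/' (its usual value in this module); the standalone function is a stand-in for the classmethod above.

import os

URL_SEP = '/'  # assumed value of the module-level constant

def normalize_repo_name(repo_name):
    # replace the OS path separator with the URL separator used for storage
    return URL_SEP.join(repo_name.split(os.sep))

# a no-op on POSIX; on Windows r'group\sub\repo' becomes 'group/sub/repo'
assert normalize_repo_name('group/sub/repo'.replace('/', os.sep)) == 'group/sub/repo'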
1412 @classmethod
1412 @classmethod
1413 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1413 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1414 session = Session()
1414 session = Session()
1415 q = session.query(cls).filter(cls.repo_name == repo_name)
1415 q = session.query(cls).filter(cls.repo_name == repo_name)
1416
1416
1417 if cache:
1417 if cache:
1418 if identity_cache:
1418 if identity_cache:
1419 val = cls.identity_cache(session, 'repo_name', repo_name)
1419 val = cls.identity_cache(session, 'repo_name', repo_name)
1420 if val:
1420 if val:
1421 return val
1421 return val
1422 else:
1422 else:
1423 q = q.options(
1423 q = q.options(
1424 FromCache("sql_cache_short",
1424 FromCache("sql_cache_short",
1425 "get_repo_by_name_%s" % _hash_key(repo_name)))
1425 "get_repo_by_name_%s" % _hash_key(repo_name)))
1426
1426
1427 return q.scalar()
1427 return q.scalar()
1428
1428
1429 @classmethod
1429 @classmethod
1430 def get_by_full_path(cls, repo_full_path):
1430 def get_by_full_path(cls, repo_full_path):
1431 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1431 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1432 repo_name = cls.normalize_repo_name(repo_name)
1432 repo_name = cls.normalize_repo_name(repo_name)
1433 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1433 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1434
1434
1435 @classmethod
1435 @classmethod
1436 def get_repo_forks(cls, repo_id):
1436 def get_repo_forks(cls, repo_id):
1437 return cls.query().filter(Repository.fork_id == repo_id)
1437 return cls.query().filter(Repository.fork_id == repo_id)
1438
1438
1439 @classmethod
1439 @classmethod
1440 def base_path(cls):
1440 def base_path(cls):
1441 """
1441 """
1442 Returns the base path where all repos are stored
1442 Returns the base path where all repos are stored
1443
1443
1444 :param cls:
1444 :param cls:
1445 """
1445 """
1446 q = Session().query(RhodeCodeUi)\
1446 q = Session().query(RhodeCodeUi)\
1447 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1447 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1448 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1448 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1449 return q.one().ui_value
1449 return q.one().ui_value
1450
1450
1451 @classmethod
1451 @classmethod
1452 def is_valid(cls, repo_name):
1452 def is_valid(cls, repo_name):
1453 """
1453 """
1454 returns True if given repo name is a valid filesystem repository
1454 returns True if given repo name is a valid filesystem repository
1455
1455
1456 :param cls:
1456 :param cls:
1457 :param repo_name:
1457 :param repo_name:
1458 """
1458 """
1459 from rhodecode.lib.utils import is_valid_repo
1459 from rhodecode.lib.utils import is_valid_repo
1460
1460
1461 return is_valid_repo(repo_name, cls.base_path())
1461 return is_valid_repo(repo_name, cls.base_path())
1462
1462
1463 @classmethod
1463 @classmethod
1464 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1464 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1465 case_insensitive=True):
1465 case_insensitive=True):
1466 q = Repository.query()
1466 q = Repository.query()
1467
1467
1468 if not isinstance(user_id, Optional):
1468 if not isinstance(user_id, Optional):
1469 q = q.filter(Repository.user_id == user_id)
1469 q = q.filter(Repository.user_id == user_id)
1470
1470
1471 if not isinstance(group_id, Optional):
1471 if not isinstance(group_id, Optional):
1472 q = q.filter(Repository.group_id == group_id)
1472 q = q.filter(Repository.group_id == group_id)
1473
1473
1474 if case_insensitive:
1474 if case_insensitive:
1475 q = q.order_by(func.lower(Repository.repo_name))
1475 q = q.order_by(func.lower(Repository.repo_name))
1476 else:
1476 else:
1477 q = q.order_by(Repository.repo_name)
1477 q = q.order_by(Repository.repo_name)
1478 return q.all()
1478 return q.all()
1479
1479
1480 @property
1480 @property
1481 def forks(self):
1481 def forks(self):
1482 """
1482 """
1483 Return forks of this repo
1483 Return forks of this repo
1484 """
1484 """
1485 return Repository.get_repo_forks(self.repo_id)
1485 return Repository.get_repo_forks(self.repo_id)
1486
1486
1487 @property
1487 @property
1488 def parent(self):
1488 def parent(self):
1489 """
1489 """
1490 Returns fork parent
1490 Returns fork parent
1491 """
1491 """
1492 return self.fork
1492 return self.fork
1493
1493
1494 @property
1494 @property
1495 def just_name(self):
1495 def just_name(self):
1496 return self.repo_name.split(self.NAME_SEP)[-1]
1496 return self.repo_name.split(self.NAME_SEP)[-1]
1497
1497
1498 @property
1498 @property
1499 def groups_with_parents(self):
1499 def groups_with_parents(self):
1500 groups = []
1500 groups = []
1501 if self.group is None:
1501 if self.group is None:
1502 return groups
1502 return groups
1503
1503
1504 cur_gr = self.group
1504 cur_gr = self.group
1505 groups.insert(0, cur_gr)
1505 groups.insert(0, cur_gr)
1506 while 1:
1506 while 1:
1507 gr = getattr(cur_gr, 'parent_group', None)
1507 gr = getattr(cur_gr, 'parent_group', None)
1508 cur_gr = cur_gr.parent_group
1508 cur_gr = cur_gr.parent_group
1509 if gr is None:
1509 if gr is None:
1510 break
1510 break
1511 groups.insert(0, gr)
1511 groups.insert(0, gr)
1512
1512
1513 return groups
1513 return groups
1514
1514
1515 @property
1515 @property
1516 def groups_and_repo(self):
1516 def groups_and_repo(self):
1517 return self.groups_with_parents, self
1517 return self.groups_with_parents, self
1518
1518
1519 @LazyProperty
1519 @LazyProperty
1520 def repo_path(self):
1520 def repo_path(self):
1521 """
1521 """
1522 Returns the full base path for this repository, i.e. where it actually
1522 Returns the full base path for this repository, i.e. where it actually
1523 exists on the filesystem
1523 exists on the filesystem
1524 """
1524 """
1525 q = Session().query(RhodeCodeUi).filter(
1525 q = Session().query(RhodeCodeUi).filter(
1526 RhodeCodeUi.ui_key == self.NAME_SEP)
1526 RhodeCodeUi.ui_key == self.NAME_SEP)
1527 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1527 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1528 return q.one().ui_value
1528 return q.one().ui_value
1529
1529
1530 @property
1530 @property
1531 def repo_full_path(self):
1531 def repo_full_path(self):
1532 p = [self.repo_path]
1532 p = [self.repo_path]
1533 # we need to split the name by / since this is how we store the
1533 # we need to split the name by / since this is how we store the
1534 # names in the database, but that eventually needs to be converted
1534 # names in the database, but that eventually needs to be converted
1535 # into a valid system path
1535 # into a valid system path
1536 p += self.repo_name.split(self.NAME_SEP)
1536 p += self.repo_name.split(self.NAME_SEP)
1537 return os.path.join(*map(safe_unicode, p))
1537 return os.path.join(*map(safe_unicode, p))
1538
1538
1539 @property
1539 @property
1540 def cache_keys(self):
1540 def cache_keys(self):
1541 """
1541 """
1542 Returns associated cache keys for that repo
1542 Returns associated cache keys for that repo
1543 """
1543 """
1544 return CacheKey.query()\
1544 return CacheKey.query()\
1545 .filter(CacheKey.cache_args == self.repo_name)\
1545 .filter(CacheKey.cache_args == self.repo_name)\
1546 .order_by(CacheKey.cache_key)\
1546 .order_by(CacheKey.cache_key)\
1547 .all()
1547 .all()
1548
1548
1549 def get_new_name(self, repo_name):
1549 def get_new_name(self, repo_name):
1550 """
1550 """
1551 returns the new full repository name based on the assigned group and the new name
1551 returns the new full repository name based on the assigned group and the new name
1552
1552
1553 :param repo_name:
1553 :param repo_name:
1554 """
1554 """
1555 path_prefix = self.group.full_path_splitted if self.group else []
1555 path_prefix = self.group.full_path_splitted if self.group else []
1556 return self.NAME_SEP.join(path_prefix + [repo_name])
1556 return self.NAME_SEP.join(path_prefix + [repo_name])
1557
1557
1558 @property
1558 @property
1559 def _config(self):
1559 def _config(self):
1560 """
1560 """
1561 Returns db based config object.
1561 Returns db based config object.
1562 """
1562 """
1563 from rhodecode.lib.utils import make_db_config
1563 from rhodecode.lib.utils import make_db_config
1564 return make_db_config(clear_session=False, repo=self)
1564 return make_db_config(clear_session=False, repo=self)
1565
1565
1566 def permissions(self, with_admins=True, with_owner=True):
1566 def permissions(self, with_admins=True, with_owner=True):
1567 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1567 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1568 q = q.options(joinedload(UserRepoToPerm.repository),
1568 q = q.options(joinedload(UserRepoToPerm.repository),
1569 joinedload(UserRepoToPerm.user),
1569 joinedload(UserRepoToPerm.user),
1570 joinedload(UserRepoToPerm.permission),)
1570 joinedload(UserRepoToPerm.permission),)
1571
1571
1572 # get owners, admins and their permissions. We rewrite the sqlalchemy
1572 # get owners, admins and their permissions. We rewrite the sqlalchemy
1573 # objects into plain AttributeDicts because the sqlalchemy session
1573 # objects into plain AttributeDicts because the sqlalchemy session
1574 # holds a global reference, so changing one object would propagate to
1574 # holds a global reference, so changing one object would propagate to
1575 # all others. If an admin is also the owner, a change to admin_row
1575 # all others. If an admin is also the owner, a change to admin_row
1576 # would otherwise affect both objects
1576 # would otherwise affect both objects
1577 perm_rows = []
1577 perm_rows = []
1578 for _usr in q.all():
1578 for _usr in q.all():
1579 usr = AttributeDict(_usr.user.get_dict())
1579 usr = AttributeDict(_usr.user.get_dict())
1580 usr.permission = _usr.permission.permission_name
1580 usr.permission = _usr.permission.permission_name
1581 perm_rows.append(usr)
1581 perm_rows.append(usr)
1582
1582
1583 # sort the perm rows so that the 'default' user comes first, then by
1583 # sort the perm rows so that the 'default' user comes first, then by
1584 # admin, write, read, none permission, and alphabetically within
1584 # admin, write, read, none permission, and alphabetically within
1585 # each group
1585 # each group
1586 perm_rows = sorted(perm_rows, key=display_sort)
1586 perm_rows = sorted(perm_rows, key=display_sort)
1587
1587
1588 _admin_perm = 'repository.admin'
1588 _admin_perm = 'repository.admin'
1589 owner_row = []
1589 owner_row = []
1590 if with_owner:
1590 if with_owner:
1591 usr = AttributeDict(self.user.get_dict())
1591 usr = AttributeDict(self.user.get_dict())
1592 usr.owner_row = True
1592 usr.owner_row = True
1593 usr.permission = _admin_perm
1593 usr.permission = _admin_perm
1594 owner_row.append(usr)
1594 owner_row.append(usr)
1595
1595
1596 super_admin_rows = []
1596 super_admin_rows = []
1597 if with_admins:
1597 if with_admins:
1598 for usr in User.get_all_super_admins():
1598 for usr in User.get_all_super_admins():
1599 # if this admin is also owner, don't double the record
1599 # if this admin is also owner, don't double the record
1600 if usr.user_id == owner_row[0].user_id:
1600 if usr.user_id == owner_row[0].user_id:
1601 owner_row[0].admin_row = True
1601 owner_row[0].admin_row = True
1602 else:
1602 else:
1603 usr = AttributeDict(usr.get_dict())
1603 usr = AttributeDict(usr.get_dict())
1604 usr.admin_row = True
1604 usr.admin_row = True
1605 usr.permission = _admin_perm
1605 usr.permission = _admin_perm
1606 super_admin_rows.append(usr)
1606 super_admin_rows.append(usr)
1607
1607
1608 return super_admin_rows + owner_row + perm_rows
1608 return super_admin_rows + owner_row + perm_rows
1609
1609
1610 def permission_user_groups(self):
1610 def permission_user_groups(self):
1611 q = UserGroupRepoToPerm.query().filter(
1611 q = UserGroupRepoToPerm.query().filter(
1612 UserGroupRepoToPerm.repository == self)
1612 UserGroupRepoToPerm.repository == self)
1613 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1613 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1614 joinedload(UserGroupRepoToPerm.users_group),
1614 joinedload(UserGroupRepoToPerm.users_group),
1615 joinedload(UserGroupRepoToPerm.permission),)
1615 joinedload(UserGroupRepoToPerm.permission),)
1616
1616
1617 perm_rows = []
1617 perm_rows = []
1618 for _user_group in q.all():
1618 for _user_group in q.all():
1619 usr = AttributeDict(_user_group.users_group.get_dict())
1619 usr = AttributeDict(_user_group.users_group.get_dict())
1620 usr.permission = _user_group.permission.permission_name
1620 usr.permission = _user_group.permission.permission_name
1621 perm_rows.append(usr)
1621 perm_rows.append(usr)
1622
1622
1623 return perm_rows
1623 return perm_rows
1624
1624
1625 def get_api_data(self, include_secrets=False):
1625 def get_api_data(self, include_secrets=False):
1626 """
1626 """
1627 Common function for generating repo api data
1627 Common function for generating repo api data
1628
1628
1629 :param include_secrets: See :meth:`User.get_api_data`.
1629 :param include_secrets: See :meth:`User.get_api_data`.
1630
1630
1631 """
1631 """
1632 # TODO: mikhail: Here there is an anti-pattern, we probably need to
1632 # TODO: mikhail: Here there is an anti-pattern, we probably need to
1633 # move these methods to the model level.
1633 # move these methods to the model level.
1634 from rhodecode.model.settings import SettingsModel
1634 from rhodecode.model.settings import SettingsModel
1635
1635
1636 repo = self
1636 repo = self
1637 _user_id, _time, _reason = self.locked
1637 _user_id, _time, _reason = self.locked
1638
1638
1639 data = {
1639 data = {
1640 'repo_id': repo.repo_id,
1640 'repo_id': repo.repo_id,
1641 'repo_name': repo.repo_name,
1641 'repo_name': repo.repo_name,
1642 'repo_type': repo.repo_type,
1642 'repo_type': repo.repo_type,
1643 'clone_uri': repo.clone_uri or '',
1643 'clone_uri': repo.clone_uri or '',
1644 'url': url('summary_home', repo_name=self.repo_name, qualified=True),
1644 'private': repo.private,
1645 'private': repo.private,
1645 'created_on': repo.created_on,
1646 'created_on': repo.created_on,
1646 'description': repo.description,
1647 'description': repo.description,
1647 'landing_rev': repo.landing_rev,
1648 'landing_rev': repo.landing_rev,
1648 'owner': repo.user.username,
1649 'owner': repo.user.username,
1649 'fork_of': repo.fork.repo_name if repo.fork else None,
1650 'fork_of': repo.fork.repo_name if repo.fork else None,
1650 'enable_statistics': repo.enable_statistics,
1651 'enable_statistics': repo.enable_statistics,
1651 'enable_locking': repo.enable_locking,
1652 'enable_locking': repo.enable_locking,
1652 'enable_downloads': repo.enable_downloads,
1653 'enable_downloads': repo.enable_downloads,
1653 'last_changeset': repo.changeset_cache,
1654 'last_changeset': repo.changeset_cache,
1654 'locked_by': User.get(_user_id).get_api_data(
1655 'locked_by': User.get(_user_id).get_api_data(
1655 include_secrets=include_secrets) if _user_id else None,
1656 include_secrets=include_secrets) if _user_id else None,
1656 'locked_date': time_to_datetime(_time) if _time else None,
1657 'locked_date': time_to_datetime(_time) if _time else None,
1657 'lock_reason': _reason if _reason else None,
1658 'lock_reason': _reason if _reason else None,
1658 }
1659 }
1659
1660
1660 # TODO: mikhail: should be per-repo settings here
1661 # TODO: mikhail: should be per-repo settings here
1661 rc_config = SettingsModel().get_all_settings()
1662 rc_config = SettingsModel().get_all_settings()
1662 repository_fields = str2bool(
1663 repository_fields = str2bool(
1663 rc_config.get('rhodecode_repository_fields'))
1664 rc_config.get('rhodecode_repository_fields'))
1664 if repository_fields:
1665 if repository_fields:
1665 for f in self.extra_fields:
1666 for f in self.extra_fields:
1666 data[f.field_key_prefixed] = f.field_value
1667 data[f.field_key_prefixed] = f.field_value
1667
1668
1668 return data
1669 return data
1669
1670
1670 @classmethod
1671 @classmethod
1671 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1672 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1672 if not lock_time:
1673 if not lock_time:
1673 lock_time = time.time()
1674 lock_time = time.time()
1674 if not lock_reason:
1675 if not lock_reason:
1675 lock_reason = cls.LOCK_AUTOMATIC
1676 lock_reason = cls.LOCK_AUTOMATIC
1676 repo.locked = [user_id, lock_time, lock_reason]
1677 repo.locked = [user_id, lock_time, lock_reason]
1677 Session().add(repo)
1678 Session().add(repo)
1678 Session().commit()
1679 Session().commit()
1679
1680
1680 @classmethod
1681 @classmethod
1681 def unlock(cls, repo):
1682 def unlock(cls, repo):
1682 repo.locked = None
1683 repo.locked = None
1683 Session().add(repo)
1684 Session().add(repo)
1684 Session().commit()
1685 Session().commit()
1685
1686
1686 @classmethod
1687 @classmethod
1687 def getlock(cls, repo):
1688 def getlock(cls, repo):
1688 return repo.locked
1689 return repo.locked
1689
1690
1690 def is_user_lock(self, user_id):
1691 def is_user_lock(self, user_id):
1691 if self.lock[0]:
1691 if self.locked[0]:
1692 if self.locked[0]:
1692 lock_user_id = safe_int(self.locked[0])
1693 lock_user_id = safe_int(self.locked[0])
1694 user_id = safe_int(user_id)
1694 # both are ints, and they are equal
1695 # both are ints, and they are equal
1695 return all([lock_user_id, user_id]) and lock_user_id == user_id
1696 return all([lock_user_id, user_id]) and lock_user_id == user_id
1696
1697
1697 return False
1698 return False
1698
1699
1699 def get_locking_state(self, action, user_id, only_when_enabled=True):
1700 def get_locking_state(self, action, user_id, only_when_enabled=True):
1700 """
1701 """
1701 Checks locking on this repository. If locking is enabled and a lock is
1702 Checks locking on this repository. If locking is enabled and a lock is
1702 present, returns a tuple of (make_lock, locked, locked_by).
1703 present, returns a tuple of (make_lock, locked, locked_by).
1703 make_lock can have 3 states: None (do nothing), True (make a lock) and
1704 make_lock can have 3 states: None (do nothing), True (make a lock) and
1704 False (release the lock). This value is later propagated to the hooks,
1705 False (release the lock). This value is later propagated to the hooks,
1705 which do the locking. Think of it as a signal telling the hooks what to do.
1706 which do the locking. Think of it as a signal telling the hooks what to do.
1706
1707
1707 """
1708 """
1708 # TODO: johbo: This is part of the business logic and should be moved
1709 # TODO: johbo: This is part of the business logic and should be moved
1709 # into the RepositoryModel.
1710 # into the RepositoryModel.
1710
1711
1711 if action not in ('push', 'pull'):
1712 if action not in ('push', 'pull'):
1712 raise ValueError("Invalid action value: %s" % repr(action))
1713 raise ValueError("Invalid action value: %s" % repr(action))
1713
1714
1714 # defines if locked error should be thrown to user
1715 # defines if locked error should be thrown to user
1715 currently_locked = False
1716 currently_locked = False
1716 # defines if new lock should be made, tri-state
1717 # defines if new lock should be made, tri-state
1717 make_lock = None
1718 make_lock = None
1718 repo = self
1719 repo = self
1719 user = User.get(user_id)
1720 user = User.get(user_id)
1720
1721
1721 lock_info = repo.locked
1722 lock_info = repo.locked
1722
1723
1723 if repo and (repo.enable_locking or not only_when_enabled):
1724 if repo and (repo.enable_locking or not only_when_enabled):
1724 if action == 'push':
1725 if action == 'push':
1725 # check if it's already locked !, if it is compare users
1725 # check if it's already locked; if it is, compare users
1726 # check if it's already locked; if it is, compare users
1727 locked_by_user_id = lock_info[0]
1727 if user.user_id == locked_by_user_id:
1728 if user.user_id == locked_by_user_id:
1728 log.debug(
1729 log.debug(
1729 'Got `push` action from user %s, now unlocking', user)
1730 'Got `push` action from user %s, now unlocking', user)
1730 # unlock if we have push from user who locked
1731 # unlock if we have push from user who locked
1731 make_lock = False
1732 make_lock = False
1732 else:
1733 else:
1733 # we're not the user who locked the repo, so deny with the status
1734 # we're not the user who locked the repo, so deny with the status
1734 # code defined in settings (default is HTTP 423 Locked)
1735 # code defined in settings (default is HTTP 423 Locked)
1735 log.debug('Repo %s is currently locked by %s', repo, user)
1736 log.debug('Repo %s is currently locked by %s', repo, user)
1736 currently_locked = True
1737 currently_locked = True
1737 elif action == 'pull':
1738 elif action == 'pull':
1738 # [0] user [1] date
1739 # [0] user [1] date
1739 if lock_info[0] and lock_info[1]:
1740 if lock_info[0] and lock_info[1]:
1740 log.debug('Repo %s is currently locked by %s', repo, user)
1741 log.debug('Repo %s is currently locked by %s', repo, user)
1741 currently_locked = True
1742 currently_locked = True
1742 else:
1743 else:
1743 log.debug('Setting lock on repo %s by %s', repo, user)
1744 log.debug('Setting lock on repo %s by %s', repo, user)
1744 make_lock = True
1745 make_lock = True
1745
1746
1746 else:
1747 else:
1747 log.debug('Repository %s does not have locking enabled', repo)
1748 log.debug('Repository %s does not have locking enabled', repo)
1748
1749
1749 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
1750 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
1750 make_lock, currently_locked, lock_info)
1751 make_lock, currently_locked, lock_info)
1751
1752
1752 from rhodecode.lib.auth import HasRepoPermissionAny
1753 from rhodecode.lib.auth import HasRepoPermissionAny
1753 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
1754 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
1754 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
1755 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
1755 # if we don't have at least write permission we cannot make a lock
1756 # if we don't have at least write permission we cannot make a lock
1756 log.debug('lock state reset back to FALSE due to lack '
1757 log.debug('lock state reset back to FALSE due to lack '
1757 'of at least write permission')
1758 'of at least write permission')
1758 make_lock = False
1759 make_lock = False
1759
1760
1760 return make_lock, currently_locked, lock_info
1761 return make_lock, currently_locked, lock_info
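# Sketch of how the tri-state result of get_locking_state() could be consumed
# (hypothetical `repo` and `user_id`, configured environment assumed):
#
#   make_lock, currently_locked, lock_info = repo.get_locking_state('push', user_id)
#   if make_lock is True:        # hooks should create a lock for this user
#       ...
#   elif make_lock is False:     # hooks should release the existing lock
#       ...
#   # make_lock is None means do nothing; currently_locked tells the caller to
#   # respond with the configured status code (HTTP 423 Locked by default)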
1761
1762
1762 @property
1763 @property
1763 def last_db_change(self):
1764 def last_db_change(self):
1764 return self.updated_on
1765 return self.updated_on
1765
1766
1766 @property
1767 @property
1767 def clone_uri_hidden(self):
1768 def clone_uri_hidden(self):
1768 clone_uri = self.clone_uri
1769 clone_uri = self.clone_uri
1769 if clone_uri:
1770 if clone_uri:
1770 import urlobject
1771 import urlobject
1771 url_obj = urlobject.URLObject(clone_uri)
1772 url_obj = urlobject.URLObject(clone_uri)
1772 if url_obj.password:
1773 if url_obj.password:
1773 clone_uri = url_obj.with_password('*****')
1774 clone_uri = url_obj.with_password('*****')
1774 return clone_uri
1775 return clone_uri
1775
1776
1776 def clone_url(self, **override):
1777 def clone_url(self, **override):
1777 qualified_home_url = url('home', qualified=True)
1778 qualified_home_url = url('home', qualified=True)
1778
1779
1779 uri_tmpl = None
1780 uri_tmpl = None
1780 if 'with_id' in override:
1781 if 'with_id' in override:
1781 uri_tmpl = self.DEFAULT_CLONE_URI_ID
1782 uri_tmpl = self.DEFAULT_CLONE_URI_ID
1782 del override['with_id']
1783 del override['with_id']
1783
1784
1784 if 'uri_tmpl' in override:
1785 if 'uri_tmpl' in override:
1785 uri_tmpl = override['uri_tmpl']
1786 uri_tmpl = override['uri_tmpl']
1786 del override['uri_tmpl']
1787 del override['uri_tmpl']
1787
1788
1788 # we didn't override our tmpl via **override
1789 # we didn't override our tmpl via **override
1789 if not uri_tmpl:
1790 if not uri_tmpl:
1790 uri_tmpl = self.DEFAULT_CLONE_URI
1791 uri_tmpl = self.DEFAULT_CLONE_URI
1791 try:
1792 try:
1792 from pylons import tmpl_context as c
1793 from pylons import tmpl_context as c
1793 uri_tmpl = c.clone_uri_tmpl
1794 uri_tmpl = c.clone_uri_tmpl
1794 except Exception:
1795 except Exception:
1795 # in any case, if we call this outside of a request context,
1796 # in any case, if we call this outside of a request context,
1796 # i.e. without tmpl_context set up
1797 # i.e. without tmpl_context set up
1797 pass
1798 pass
1798
1799
1799 return get_clone_url(uri_tmpl=uri_tmpl,
1800 return get_clone_url(uri_tmpl=uri_tmpl,
1800 qualifed_home_url=qualified_home_url,
1801 qualifed_home_url=qualified_home_url,
1801 repo_name=self.repo_name,
1802 repo_name=self.repo_name,
1802 repo_id=self.repo_id, **override)
1803 repo_id=self.repo_id, **override)
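# Usage sketch: clone_url() renders the configured clone URI template and needs
# a request context (for url('home') and c.clone_uri_tmpl); `repo` is a
# hypothetical Repository instance:
#
#   default_url = repo.clone_url()               # template based on repo_name
#   id_based_url = repo.clone_url(with_id=True)  # uses DEFAULT_CLONE_URI_ID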
1803
1804
1804 def set_state(self, state):
1805 def set_state(self, state):
1805 self.repo_state = state
1806 self.repo_state = state
1806 Session().add(self)
1807 Session().add(self)
1807 #==========================================================================
1808 #==========================================================================
1808 # SCM PROPERTIES
1809 # SCM PROPERTIES
1809 #==========================================================================
1810 #==========================================================================
1810
1811
1811 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
1812 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
1812 return get_commit_safe(
1813 return get_commit_safe(
1813 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
1814 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
1814
1815
1815 def get_changeset(self, rev=None, pre_load=None):
1816 def get_changeset(self, rev=None, pre_load=None):
1816 warnings.warn("Use get_commit", DeprecationWarning)
1817 warnings.warn("Use get_commit", DeprecationWarning)
1817 commit_id = None
1818 commit_id = None
1818 commit_idx = None
1819 commit_idx = None
1819 if isinstance(rev, basestring):
1820 if isinstance(rev, basestring):
1820 commit_id = rev
1821 commit_id = rev
1821 else:
1822 else:
1822 commit_idx = rev
1823 commit_idx = rev
1823 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
1824 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
1824 pre_load=pre_load)
1825 pre_load=pre_load)
1825
1826
1826 def get_landing_commit(self):
1827 def get_landing_commit(self):
1827 """
1828 """
1828 Returns the landing commit, or the tip if the landing commit doesn't exist
1829 Returns the landing commit, or the tip if the landing commit doesn't exist
1829 """
1830 """
1830 _rev_type, _rev = self.landing_rev
1831 _rev_type, _rev = self.landing_rev
1831 commit = self.get_commit(_rev)
1832 commit = self.get_commit(_rev)
1832 if isinstance(commit, EmptyCommit):
1833 if isinstance(commit, EmptyCommit):
1833 return self.get_commit()
1834 return self.get_commit()
1834 return commit
1835 return commit
1835
1836
1836 def update_commit_cache(self, cs_cache=None, config=None):
1837 def update_commit_cache(self, cs_cache=None, config=None):
1837 """
1838 """
1838 Update cache of last changeset for repository, keys should be::
1839 Update cache of last changeset for repository, keys should be::
1839
1840
1840 short_id
1841 short_id
1841 raw_id
1842 raw_id
1842 revision
1843 revision
1843 parents
1844 parents
1844 message
1845 message
1845 date
1846 date
1846 author
1847 author
1847
1848
1848 :param cs_cache:
1849 :param cs_cache:
1849 """
1850 """
1850 from rhodecode.lib.vcs.backends.base import BaseChangeset
1851 from rhodecode.lib.vcs.backends.base import BaseChangeset
1851 if cs_cache is None:
1852 if cs_cache is None:
1852 # use no-cache version here
1853 # use no-cache version here
1853 scm_repo = self.scm_instance(cache=False, config=config)
1854 scm_repo = self.scm_instance(cache=False, config=config)
1854 if scm_repo:
1855 if scm_repo:
1855 cs_cache = scm_repo.get_commit(
1856 cs_cache = scm_repo.get_commit(
1856 pre_load=["author", "date", "message", "parents"])
1857 pre_load=["author", "date", "message", "parents"])
1857 else:
1858 else:
1858 cs_cache = EmptyCommit()
1859 cs_cache = EmptyCommit()
1859
1860
1860 if isinstance(cs_cache, BaseChangeset):
1861 if isinstance(cs_cache, BaseChangeset):
1861 cs_cache = cs_cache.__json__()
1862 cs_cache = cs_cache.__json__()
1862
1863
1863 def is_outdated(new_cs_cache):
1864 def is_outdated(new_cs_cache):
1864 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
1865 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
1865 new_cs_cache['revision'] != self.changeset_cache['revision']):
1866 new_cs_cache['revision'] != self.changeset_cache['revision']):
1866 return True
1867 return True
1867 return False
1868 return False
1868
1869
1869 # check whether we already have the latest cached revision
1870 # check whether we already have the latest cached revision
1870 if is_outdated(cs_cache) or not self.changeset_cache:
1871 if is_outdated(cs_cache) or not self.changeset_cache:
1871 _default = datetime.datetime.fromtimestamp(0)
1872 _default = datetime.datetime.fromtimestamp(0)
1872 last_change = cs_cache.get('date') or _default
1873 last_change = cs_cache.get('date') or _default
1873 log.debug('updated repo %s with new cs cache %s',
1874 log.debug('updated repo %s with new cs cache %s',
1874 self.repo_name, cs_cache)
1875 self.repo_name, cs_cache)
1875 self.updated_on = last_change
1876 self.updated_on = last_change
1876 self.changeset_cache = cs_cache
1877 self.changeset_cache = cs_cache
1877 Session().add(self)
1878 Session().add(self)
1878 Session().commit()
1879 Session().commit()
1879 else:
1880 else:
1880 log.debug('Skipping update_commit_cache for repo:`%s` '
1881 log.debug('Skipping update_commit_cache for repo:`%s` '
1881 'commit already with latest changes', self.repo_name)
1882 'commit already with latest changes', self.repo_name)
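# Sketch of a cs_cache dict that update_commit_cache() accepts (values are
# hypothetical; the keys follow the docstring above). Passing cs_cache=None
# lets the method compute the cache from the scm repository instead:
#
#   cs_cache = {
#       'short_id': 'abc123', 'raw_id': 'abc123def456...', 'revision': 42,
#       'parents': [], 'message': 'fix bug', 'author': 'dev <dev@example.com>',
#       'date': datetime.datetime.utcnow(),
#   }
#   repo.update_commit_cache(cs_cache=cs_cache)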
1882
1883
1883 @property
1884 @property
1884 def tip(self):
1885 def tip(self):
1885 return self.get_commit('tip')
1886 return self.get_commit('tip')
1886
1887
1887 @property
1888 @property
1888 def author(self):
1889 def author(self):
1889 return self.tip.author
1890 return self.tip.author
1890
1891
1891 @property
1892 @property
1892 def last_change(self):
1893 def last_change(self):
1893 return self.scm_instance().last_change
1894 return self.scm_instance().last_change
1894
1895
1895 def get_comments(self, revisions=None):
1896 def get_comments(self, revisions=None):
1896 """
1897 """
1897 Returns comments for this repository grouped by revisions
1898 Returns comments for this repository grouped by revisions
1898
1899
1899 :param revisions: filter query by revisions only
1900 :param revisions: filter query by revisions only
1900 """
1901 """
1901 cmts = ChangesetComment.query()\
1902 cmts = ChangesetComment.query()\
1902 .filter(ChangesetComment.repo == self)
1903 .filter(ChangesetComment.repo == self)
1903 if revisions:
1904 if revisions:
1904 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
1905 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
1905 grouped = collections.defaultdict(list)
1906 grouped = collections.defaultdict(list)
1906 for cmt in cmts.all():
1907 for cmt in cmts.all():
1907 grouped[cmt.revision].append(cmt)
1908 grouped[cmt.revision].append(cmt)
1908 return grouped
1909 return grouped
1909
1910
1910 def statuses(self, revisions=None):
1911 def statuses(self, revisions=None):
1911 """
1912 """
1912 Returns statuses for this repository
1913 Returns statuses for this repository
1913
1914
1914 :param revisions: list of revisions to get statuses for
1915 :param revisions: list of revisions to get statuses for
1915 """
1916 """
1916 statuses = ChangesetStatus.query()\
1917 statuses = ChangesetStatus.query()\
1917 .filter(ChangesetStatus.repo == self)\
1918 .filter(ChangesetStatus.repo == self)\
1918 .filter(ChangesetStatus.version == 0)
1919 .filter(ChangesetStatus.version == 0)
1919
1920
1920 if revisions:
1921 if revisions:
1921 # Try doing the filtering in chunks to avoid hitting limits
1922 # Try doing the filtering in chunks to avoid hitting limits
1922 size = 500
1923 size = 500
1923 status_results = []
1924 status_results = []
1924 for chunk in xrange(0, len(revisions), size):
1925 for chunk in xrange(0, len(revisions), size):
1925 status_results += statuses.filter(
1926 status_results += statuses.filter(
1926 ChangesetStatus.revision.in_(
1927 ChangesetStatus.revision.in_(
1927 revisions[chunk: chunk+size])
1928 revisions[chunk: chunk+size])
1928 ).all()
1929 ).all()
1929 else:
1930 else:
1930 status_results = statuses.all()
1931 status_results = statuses.all()
1931
1932
1932 grouped = {}
1933 grouped = {}
1933
1934
1934 # maybe we have a newly opened pull request without a status?
1935 # maybe we have a newly opened pull request without a status?
1935 stat = ChangesetStatus.STATUS_UNDER_REVIEW
1936 stat = ChangesetStatus.STATUS_UNDER_REVIEW
1936 status_lbl = ChangesetStatus.get_status_lbl(stat)
1937 status_lbl = ChangesetStatus.get_status_lbl(stat)
1937 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
1938 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
1938 for rev in pr.revisions:
1939 for rev in pr.revisions:
1939 pr_id = pr.pull_request_id
1940 pr_id = pr.pull_request_id
1940 pr_repo = pr.target_repo.repo_name
1941 pr_repo = pr.target_repo.repo_name
1941 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
1942 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
1942
1943
1943 for stat in status_results:
1944 for stat in status_results:
1944 pr_id = pr_repo = None
1945 pr_id = pr_repo = None
1945 if stat.pull_request:
1946 if stat.pull_request:
1946 pr_id = stat.pull_request.pull_request_id
1947 pr_id = stat.pull_request.pull_request_id
1947 pr_repo = stat.pull_request.target_repo.repo_name
1948 pr_repo = stat.pull_request.target_repo.repo_name
1948 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
1949 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
1949 pr_id, pr_repo]
1950 pr_id, pr_repo]
1950 return grouped
1951 return grouped
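# Usage sketch: statuses() returns a dict keyed by revision, each value being
# [status, status_lbl, pr_id, pr_repo] (hypothetical `repo` and revision ids):
#
#   for rev, (status, label, pr_id, pr_repo) in repo.statuses(['abc123']).items():
#       print rev, status, label, pr_id, pr_repo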
1951
1952
1952 # ==========================================================================
1953 # ==========================================================================
1953 # SCM CACHE INSTANCE
1954 # SCM CACHE INSTANCE
1954 # ==========================================================================
1955 # ==========================================================================
1955
1956
1956 def scm_instance(self, **kwargs):
1957 def scm_instance(self, **kwargs):
1957 import rhodecode
1958 import rhodecode
1958
1959
1959 # Passing a config will not hit the cache; currently this is only used
1960 # Passing a config will not hit the cache; currently this is only used
1960 # for repo2dbmapper
1961 # for repo2dbmapper
1961 config = kwargs.pop('config', None)
1962 config = kwargs.pop('config', None)
1962 cache = kwargs.pop('cache', None)
1963 cache = kwargs.pop('cache', None)
1963 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
1964 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
1964 # if cache is NOT defined, use the global default; otherwise we have full
1965 # if cache is NOT defined, use the global default; otherwise we have full
1965 # control over the cache behaviour
1966 # control over the cache behaviour
1966 if cache is None and full_cache and not config:
1967 if cache is None and full_cache and not config:
1967 return self._get_instance_cached()
1968 return self._get_instance_cached()
1968 return self._get_instance(cache=bool(cache), config=config)
1969 return self._get_instance(cache=bool(cache), config=config)
1969
1970
1970 def _get_instance_cached(self):
1971 def _get_instance_cached(self):
1971 @cache_region('long_term')
1972 @cache_region('long_term')
1972 def _get_repo(cache_key):
1973 def _get_repo(cache_key):
1973 return self._get_instance()
1974 return self._get_instance()
1974
1975
1975 invalidator_context = CacheKey.repo_context_cache(
1976 invalidator_context = CacheKey.repo_context_cache(
1976 _get_repo, self.repo_name, None)
1977 _get_repo, self.repo_name, None)
1977
1978
1978 with invalidator_context as context:
1979 with invalidator_context as context:
1979 context.invalidate()
1980 context.invalidate()
1980 repo = context.compute()
1981 repo = context.compute()
1981
1982
1982 return repo
1983 return repo
1983
1984
1984 def _get_instance(self, cache=True, config=None):
1985 def _get_instance(self, cache=True, config=None):
1985 repo_full_path = self.repo_full_path
1986 repo_full_path = self.repo_full_path
1986 try:
1987 try:
1987 vcs_alias = get_scm(repo_full_path)[0]
1988 vcs_alias = get_scm(repo_full_path)[0]
1988 log.debug(
1989 log.debug(
1989 'Creating instance of %s repository from %s',
1990 'Creating instance of %s repository from %s',
1990 vcs_alias, repo_full_path)
1991 vcs_alias, repo_full_path)
1991 backend = get_backend(vcs_alias)
1992 backend = get_backend(vcs_alias)
1992 except VCSError:
1993 except VCSError:
1993 log.exception(
1994 log.exception(
1994 'Perhaps this repository is in the db but not on the '
1995 'Perhaps this repository is in the db but not on the '
1995 'filesystem; run "rescan repositories" with the '
1996 'filesystem; run "rescan repositories" with the '
1996 '"destroy old data" option from the admin panel')
1997 '"destroy old data" option from the admin panel')
1997 return
1998 return
1998
1999
1999 config = config or self._config
2000 config = config or self._config
2000 custom_wire = {
2001 custom_wire = {
2001 'cache': cache # controls the vcs.remote cache
2002 'cache': cache # controls the vcs.remote cache
2002 }
2003 }
2003 repo = backend(
2004 repo = backend(
2004 safe_str(repo_full_path), config=config, create=False,
2005 safe_str(repo_full_path), config=config, create=False,
2005 with_wire=custom_wire)
2006 with_wire=custom_wire)
2006
2007
2007 return repo
2008 return repo
2008
2009
2009 def __json__(self):
2010 def __json__(self):
2010 return {'landing_rev': self.landing_rev}
2011 return {'landing_rev': self.landing_rev}
2011
2012
2012 def get_dict(self):
2013 def get_dict(self):
2013
2014
2014 # Since we transformed `repo_name` to a hybrid property, we need to
2015 # Since we transformed `repo_name` to a hybrid property, we need to
2015 # keep compatibility with the code which uses `repo_name` field.
2016 # keep compatibility with the code which uses `repo_name` field.
2016
2017
2017 result = super(Repository, self).get_dict()
2018 result = super(Repository, self).get_dict()
2018 result['repo_name'] = result.pop('_repo_name', None)
2019 result['repo_name'] = result.pop('_repo_name', None)
2019 return result
2020 return result
2020
2021
2021
2022
2022 class RepoGroup(Base, BaseModel):
2023 class RepoGroup(Base, BaseModel):
2023 __tablename__ = 'groups'
2024 __tablename__ = 'groups'
2024 __table_args__ = (
2025 __table_args__ = (
2025 UniqueConstraint('group_name', 'group_parent_id'),
2026 UniqueConstraint('group_name', 'group_parent_id'),
2026 CheckConstraint('group_id != group_parent_id'),
2027 CheckConstraint('group_id != group_parent_id'),
2027 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2028 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2028 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2029 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2029 )
2030 )
2030 __mapper_args__ = {'order_by': 'group_name'}
2031 __mapper_args__ = {'order_by': 'group_name'}
2031
2032
2032 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2033 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2033
2034
2034 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2035 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2035 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2036 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2036 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2037 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2037 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2038 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2038 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2039 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2039 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2040 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2040 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2041 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2041
2042
2042 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2043 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2043 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2044 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2044 parent_group = relationship('RepoGroup', remote_side=group_id)
2045 parent_group = relationship('RepoGroup', remote_side=group_id)
2045 user = relationship('User')
2046 user = relationship('User')
2046
2047
2047 def __init__(self, group_name='', parent_group=None):
2048 def __init__(self, group_name='', parent_group=None):
2048 self.group_name = group_name
2049 self.group_name = group_name
2049 self.parent_group = parent_group
2050 self.parent_group = parent_group
2050
2051
2051 def __unicode__(self):
2052 def __unicode__(self):
2052 return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
2053 return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
2053 self.group_name)
2054 self.group_name)
2054
2055
2055 @classmethod
2056 @classmethod
2056 def _generate_choice(cls, repo_group):
2057 def _generate_choice(cls, repo_group):
2057 from webhelpers.html import literal as _literal
2058 from webhelpers.html import literal as _literal
2058 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2059 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2059 return repo_group.group_id, _name(repo_group.full_path_splitted)
2060 return repo_group.group_id, _name(repo_group.full_path_splitted)
2060
2061
2061 @classmethod
2062 @classmethod
2062 def groups_choices(cls, groups=None, show_empty_group=True):
2063 def groups_choices(cls, groups=None, show_empty_group=True):
2063 if not groups:
2064 if not groups:
2064 groups = cls.query().all()
2065 groups = cls.query().all()
2065
2066
2066 repo_groups = []
2067 repo_groups = []
2067 if show_empty_group:
2068 if show_empty_group:
2068 repo_groups = [('-1', u'-- %s --' % _('No parent'))]
2069 repo_groups = [('-1', u'-- %s --' % _('No parent'))]
2069
2070
2070 repo_groups.extend([cls._generate_choice(x) for x in groups])
2071 repo_groups.extend([cls._generate_choice(x) for x in groups])
2071
2072
2072 repo_groups = sorted(
2073 repo_groups = sorted(
2073 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2074 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2074 return repo_groups
2075 return repo_groups
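# Usage sketch: groups_choices() yields (group_id, label) pairs suitable for a
# select2 widget; the exact ids and labels below are hypothetical:
#
#   RepoGroup.groups_choices()
#   # -> [('-1', u'-- No parent --'), (3, u'libs'), (7, u'libs/python'), ...]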
2075
2076
2076 @classmethod
2077 @classmethod
2077 def url_sep(cls):
2078 def url_sep(cls):
2078 return URL_SEP
2079 return URL_SEP
2079
2080
2080 @classmethod
2081 @classmethod
2081 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2082 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2082 if case_insensitive:
2083 if case_insensitive:
2083 gr = cls.query().filter(func.lower(cls.group_name)
2084 gr = cls.query().filter(func.lower(cls.group_name)
2084 == func.lower(group_name))
2085 == func.lower(group_name))
2085 else:
2086 else:
2086 gr = cls.query().filter(cls.group_name == group_name)
2087 gr = cls.query().filter(cls.group_name == group_name)
2087 if cache:
2088 if cache:
2088 gr = gr.options(FromCache(
2089 gr = gr.options(FromCache(
2089 "sql_cache_short",
2090 "sql_cache_short",
2090 "get_group_%s" % _hash_key(group_name)))
2091 "get_group_%s" % _hash_key(group_name)))
2091 return gr.scalar()
2092 return gr.scalar()
2092
2093
2093 @classmethod
2094 @classmethod
2094 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2095 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2095 case_insensitive=True):
2096 case_insensitive=True):
2096 q = RepoGroup.query()
2097 q = RepoGroup.query()
2097
2098
2098 if not isinstance(user_id, Optional):
2099 if not isinstance(user_id, Optional):
2099 q = q.filter(RepoGroup.user_id == user_id)
2100 q = q.filter(RepoGroup.user_id == user_id)
2100
2101
2101 if not isinstance(group_id, Optional):
2102 if not isinstance(group_id, Optional):
2102 q = q.filter(RepoGroup.group_parent_id == group_id)
2103 q = q.filter(RepoGroup.group_parent_id == group_id)
2103
2104
2104 if case_insensitive:
2105 if case_insensitive:
2105 q = q.order_by(func.lower(RepoGroup.group_name))
2106 q = q.order_by(func.lower(RepoGroup.group_name))
2106 else:
2107 else:
2107 q = q.order_by(RepoGroup.group_name)
2108 q = q.order_by(RepoGroup.group_name)
2108 return q.all()
2109 return q.all()
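# Usage sketch: the Optional(...) defaults act as "not supplied" sentinels, so
# filters are applied only for arguments that are actually passed in:
#
#   RepoGroup.get_all_repo_groups()              # all groups, sorted by name
#   RepoGroup.get_all_repo_groups(user_id=42)    # only groups owned by user 42
#   RepoGroup.get_all_repo_groups(group_id=7)    # only direct children of group 7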
2109
2110
2110 @property
2111 @property
2111 def parents(self):
2112 def parents(self):
2112 parents_recursion_limit = 10
2113 parents_recursion_limit = 10
2113 groups = []
2114 groups = []
2114 if self.parent_group is None:
2115 if self.parent_group is None:
2115 return groups
2116 return groups
2116 cur_gr = self.parent_group
2117 cur_gr = self.parent_group
2117 groups.insert(0, cur_gr)
2118 groups.insert(0, cur_gr)
2118 cnt = 0
2119 cnt = 0
2119 while 1:
2120 while 1:
2120 cnt += 1
2121 cnt += 1
2121 gr = getattr(cur_gr, 'parent_group', None)
2122 gr = getattr(cur_gr, 'parent_group', None)
2122 cur_gr = cur_gr.parent_group
2123 cur_gr = cur_gr.parent_group
2123 if gr is None:
2124 if gr is None:
2124 break
2125 break
2125 if cnt == parents_recursion_limit:
2126 if cnt == parents_recursion_limit:
2126 # this will prevent accidental infinite loops
2127 # this will prevent accidental infinite loops
2127 log.error(('more than %s parents found for group %s, stopping '
2128 log.error(('more than %s parents found for group %s, stopping '
2128 'recursive parent fetching' % (parents_recursion_limit, self)))
2129 'recursive parent fetching' % (parents_recursion_limit, self)))
2129 break
2130 break
2130
2131
2131 groups.insert(0, gr)
2132 groups.insert(0, gr)
2132 return groups
2133 return groups
2133
2134
2134 @property
2135 @property
2135 def children(self):
2136 def children(self):
2136 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2137 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2137
2138
2138 @property
2139 @property
2139 def name(self):
2140 def name(self):
2140 return self.group_name.split(RepoGroup.url_sep())[-1]
2141 return self.group_name.split(RepoGroup.url_sep())[-1]
2141
2142
2142 @property
2143 @property
2143 def full_path(self):
2144 def full_path(self):
2144 return self.group_name
2145 return self.group_name
2145
2146
2146 @property
2147 @property
2147 def full_path_splitted(self):
2148 def full_path_splitted(self):
2148 return self.group_name.split(RepoGroup.url_sep())
2149 return self.group_name.split(RepoGroup.url_sep())
2149
2150
2150 @property
2151 @property
2151 def repositories(self):
2152 def repositories(self):
2152 return Repository.query()\
2153 return Repository.query()\
2153 .filter(Repository.group == self)\
2154 .filter(Repository.group == self)\
2154 .order_by(Repository.repo_name)
2155 .order_by(Repository.repo_name)
2155
2156
2156 @property
2157 @property
2157 def repositories_recursive_count(self):
2158 def repositories_recursive_count(self):
2158 cnt = self.repositories.count()
2159 cnt = self.repositories.count()
2159
2160
2160 def children_count(group):
2161 def children_count(group):
2161 cnt = 0
2162 cnt = 0
2162 for child in group.children:
2163 for child in group.children:
2163 cnt += child.repositories.count()
2164 cnt += child.repositories.count()
2164 cnt += children_count(child)
2165 cnt += children_count(child)
2165 return cnt
2166 return cnt
2166
2167
2167 return cnt + children_count(self)
2168 return cnt + children_count(self)
2168
2169
2169 def _recursive_objects(self, include_repos=True):
2170 def _recursive_objects(self, include_repos=True):
2170 all_ = []
2171 all_ = []
2171
2172
2172 def _get_members(root_gr):
2173 def _get_members(root_gr):
2173 if include_repos:
2174 if include_repos:
2174 for r in root_gr.repositories:
2175 for r in root_gr.repositories:
2175 all_.append(r)
2176 all_.append(r)
2176 childs = root_gr.children.all()
2177 childs = root_gr.children.all()
2177 if childs:
2178 if childs:
2178 for gr in childs:
2179 for gr in childs:
2179 all_.append(gr)
2180 all_.append(gr)
2180 _get_members(gr)
2181 _get_members(gr)
2181
2182
2182 _get_members(self)
2183 _get_members(self)
2183 return [self] + all_
2184 return [self] + all_
2184
2185
2185 def recursive_groups_and_repos(self):
2186 def recursive_groups_and_repos(self):
2186 """
2187 """
2187 Recursively return all groups, including the repositories in those groups
2188 Recursively return all groups, including the repositories in those groups
2188 """
2189 """
2189 return self._recursive_objects()
2190 return self._recursive_objects()
2190
2191
2191 def recursive_groups(self):
2192 def recursive_groups(self):
2192 """
2193 """
2193 Returns all children groups for this group including children of children
2194 Returns all children groups for this group including children of children
2194 """
2195 """
2195 return self._recursive_objects(include_repos=False)
2196 return self._recursive_objects(include_repos=False)
2196
2197
2197 def get_new_name(self, group_name):
2198 def get_new_name(self, group_name):
2198 """
2199 """
2199 returns new full group name based on parent and new name
2200 returns new full group name based on parent and new name
2200
2201
2201 :param group_name:
2202 :param group_name:
2202 """
2203 """
2203 path_prefix = (self.parent_group.full_path_splitted if
2204 path_prefix = (self.parent_group.full_path_splitted if
2204 self.parent_group else [])
2205 self.parent_group else [])
2205 return RepoGroup.url_sep().join(path_prefix + [group_name])
2206 return RepoGroup.url_sep().join(path_prefix + [group_name])
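# Usage sketch (assuming the URL separator is '/'; the group name used here is
# hypothetical):
#
#   group = RepoGroup.get_by_group_name('libs/python')
#   group.get_new_name('tools')   # -> 'libs/python/tools'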
2206
2207
2207 def permissions(self, with_admins=True, with_owner=True):
2208 def permissions(self, with_admins=True, with_owner=True):
2208 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2209 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2209 q = q.options(joinedload(UserRepoGroupToPerm.group),
2210 q = q.options(joinedload(UserRepoGroupToPerm.group),
2210 joinedload(UserRepoGroupToPerm.user),
2211 joinedload(UserRepoGroupToPerm.user),
2211 joinedload(UserRepoGroupToPerm.permission),)
2212 joinedload(UserRepoGroupToPerm.permission),)
2212
2213
2213 # get owners, admins and their permissions. We rewrite the SQLAlchemy
2214 # get owners, admins and their permissions. We rewrite the SQLAlchemy
2214 # objects into plain AttributeDicts because the SQLAlchemy session
2215 # objects into plain AttributeDicts because the SQLAlchemy session
2215 # keeps a global reference, so changing one object propagates to all
2216 # keeps a global reference, so changing one object propagates to all
2216 # others. This means that if an admin is also the owner, a change to the
2217 # others. This means that if an admin is also the owner, a change to the
2217 # admin_row would otherwise propagate to both objects
2218 # admin_row would otherwise propagate to both objects
2218 perm_rows = []
2219 perm_rows = []
2219 for _usr in q.all():
2220 for _usr in q.all():
2220 usr = AttributeDict(_usr.user.get_dict())
2221 usr = AttributeDict(_usr.user.get_dict())
2221 usr.permission = _usr.permission.permission_name
2222 usr.permission = _usr.permission.permission_name
2222 perm_rows.append(usr)
2223 perm_rows.append(usr)
2223
2224
2224 # sort the perm rows so the 'default' user comes first, then by
2225 # sort the perm rows so the 'default' user comes first, then by
2225 # admin/write/read/none permission, and alphabetically within
2226 # admin/write/read/none permission, and alphabetically within
2226 # each group
2227 # each group
2227 perm_rows = sorted(perm_rows, key=display_sort)
2228 perm_rows = sorted(perm_rows, key=display_sort)
2228
2229
2229 _admin_perm = 'group.admin'
2230 _admin_perm = 'group.admin'
2230 owner_row = []
2231 owner_row = []
2231 if with_owner:
2232 if with_owner:
2232 usr = AttributeDict(self.user.get_dict())
2233 usr = AttributeDict(self.user.get_dict())
2233 usr.owner_row = True
2234 usr.owner_row = True
2234 usr.permission = _admin_perm
2235 usr.permission = _admin_perm
2235 owner_row.append(usr)
2236 owner_row.append(usr)
2236
2237
2237 super_admin_rows = []
2238 super_admin_rows = []
2238 if with_admins:
2239 if with_admins:
2239 for usr in User.get_all_super_admins():
2240 for usr in User.get_all_super_admins():
2240 # if this admin is also owner, don't double the record
2241 # if this admin is also owner, don't double the record
2241 if usr.user_id == owner_row[0].user_id:
2242 if usr.user_id == owner_row[0].user_id:
2242 owner_row[0].admin_row = True
2243 owner_row[0].admin_row = True
2243 else:
2244 else:
2244 usr = AttributeDict(usr.get_dict())
2245 usr = AttributeDict(usr.get_dict())
2245 usr.admin_row = True
2246 usr.admin_row = True
2246 usr.permission = _admin_perm
2247 usr.permission = _admin_perm
2247 super_admin_rows.append(usr)
2248 super_admin_rows.append(usr)
2248
2249
2249 return super_admin_rows + owner_row + perm_rows
2250 return super_admin_rows + owner_row + perm_rows
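# Usage sketch: permissions() returns AttributeDict rows, super-admins first,
# then the owner, then explicit user permissions. 'username' is assumed to be
# part of User.get_dict():
#
#   for row in group.permissions(with_admins=True, with_owner=True):
#       print row.username, row.permission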
2250
2251
2251 def permission_user_groups(self):
2252 def permission_user_groups(self):
2252 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2253 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2253 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2254 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2254 joinedload(UserGroupRepoGroupToPerm.users_group),
2255 joinedload(UserGroupRepoGroupToPerm.users_group),
2255 joinedload(UserGroupRepoGroupToPerm.permission),)
2256 joinedload(UserGroupRepoGroupToPerm.permission),)
2256
2257
2257 perm_rows = []
2258 perm_rows = []
2258 for _user_group in q.all():
2259 for _user_group in q.all():
2259 usr = AttributeDict(_user_group.users_group.get_dict())
2260 usr = AttributeDict(_user_group.users_group.get_dict())
2260 usr.permission = _user_group.permission.permission_name
2261 usr.permission = _user_group.permission.permission_name
2261 perm_rows.append(usr)
2262 perm_rows.append(usr)
2262
2263
2263 return perm_rows
2264 return perm_rows
2264
2265
2265 def get_api_data(self):
2266 def get_api_data(self):
2266 """
2267 """
2267 Common function for generating api data
2268 Common function for generating api data
2268
2269
2269 """
2270 """
2270 group = self
2271 group = self
2271 data = {
2272 data = {
2272 'group_id': group.group_id,
2273 'group_id': group.group_id,
2273 'group_name': group.group_name,
2274 'group_name': group.group_name,
2274 'group_description': group.group_description,
2275 'group_description': group.group_description,
2275 'parent_group': group.parent_group.group_name if group.parent_group else None,
2276 'parent_group': group.parent_group.group_name if group.parent_group else None,
2276 'repositories': [x.repo_name for x in group.repositories],
2277 'repositories': [x.repo_name for x in group.repositories],
2277 'owner': group.user.username,
2278 'owner': group.user.username,
2278 }
2279 }
2279 return data
2280 return data
2280
2281
2281
2282
2282 class Permission(Base, BaseModel):
2283 class Permission(Base, BaseModel):
2283 __tablename__ = 'permissions'
2284 __tablename__ = 'permissions'
2284 __table_args__ = (
2285 __table_args__ = (
2285 Index('p_perm_name_idx', 'permission_name'),
2286 Index('p_perm_name_idx', 'permission_name'),
2286 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2287 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2287 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2288 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2288 )
2289 )
2289 PERMS = [
2290 PERMS = [
2290 ('hg.admin', _('RhodeCode Super Administrator')),
2291 ('hg.admin', _('RhodeCode Super Administrator')),
2291
2292
2292 ('repository.none', _('Repository no access')),
2293 ('repository.none', _('Repository no access')),
2293 ('repository.read', _('Repository read access')),
2294 ('repository.read', _('Repository read access')),
2294 ('repository.write', _('Repository write access')),
2295 ('repository.write', _('Repository write access')),
2295 ('repository.admin', _('Repository admin access')),
2296 ('repository.admin', _('Repository admin access')),
2296
2297
2297 ('group.none', _('Repository group no access')),
2298 ('group.none', _('Repository group no access')),
2298 ('group.read', _('Repository group read access')),
2299 ('group.read', _('Repository group read access')),
2299 ('group.write', _('Repository group write access')),
2300 ('group.write', _('Repository group write access')),
2300 ('group.admin', _('Repository group admin access')),
2301 ('group.admin', _('Repository group admin access')),
2301
2302
2302 ('usergroup.none', _('User group no access')),
2303 ('usergroup.none', _('User group no access')),
2303 ('usergroup.read', _('User group read access')),
2304 ('usergroup.read', _('User group read access')),
2304 ('usergroup.write', _('User group write access')),
2305 ('usergroup.write', _('User group write access')),
2305 ('usergroup.admin', _('User group admin access')),
2306 ('usergroup.admin', _('User group admin access')),
2306
2307
2307 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2308 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2308 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2309 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2309
2310
2310 ('hg.usergroup.create.false', _('User Group creation disabled')),
2311 ('hg.usergroup.create.false', _('User Group creation disabled')),
2311 ('hg.usergroup.create.true', _('User Group creation enabled')),
2312 ('hg.usergroup.create.true', _('User Group creation enabled')),
2312
2313
2313 ('hg.create.none', _('Repository creation disabled')),
2314 ('hg.create.none', _('Repository creation disabled')),
2314 ('hg.create.repository', _('Repository creation enabled')),
2315 ('hg.create.repository', _('Repository creation enabled')),
2315 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2316 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2316 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2317 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2317
2318
2318 ('hg.fork.none', _('Repository forking disabled')),
2319 ('hg.fork.none', _('Repository forking disabled')),
2319 ('hg.fork.repository', _('Repository forking enabled')),
2320 ('hg.fork.repository', _('Repository forking enabled')),
2320
2321
2321 ('hg.register.none', _('Registration disabled')),
2322 ('hg.register.none', _('Registration disabled')),
2322 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2323 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2323 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2324 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2324
2325
2325 ('hg.extern_activate.manual', _('Manual activation of external account')),
2326 ('hg.extern_activate.manual', _('Manual activation of external account')),
2326 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2327 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2327
2328
2328 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2329 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2329 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2330 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2330 ]
2331 ]
2331
2332
2332 # definition of system default permissions for DEFAULT user
2333 # definition of system default permissions for DEFAULT user
2333 DEFAULT_USER_PERMISSIONS = [
2334 DEFAULT_USER_PERMISSIONS = [
2334 'repository.read',
2335 'repository.read',
2335 'group.read',
2336 'group.read',
2336 'usergroup.read',
2337 'usergroup.read',
2337 'hg.create.repository',
2338 'hg.create.repository',
2338 'hg.repogroup.create.false',
2339 'hg.repogroup.create.false',
2339 'hg.usergroup.create.false',
2340 'hg.usergroup.create.false',
2340 'hg.create.write_on_repogroup.true',
2341 'hg.create.write_on_repogroup.true',
2341 'hg.fork.repository',
2342 'hg.fork.repository',
2342 'hg.register.manual_activate',
2343 'hg.register.manual_activate',
2343 'hg.extern_activate.auto',
2344 'hg.extern_activate.auto',
2344 'hg.inherit_default_perms.true',
2345 'hg.inherit_default_perms.true',
2345 ]
2346 ]
2346
2347
2347 # Weight defines which permissions are more important.
2348 # Weight defines which permissions are more important.
2348 # The higher the number, the more important
2349 # The higher the number, the more important
2349 # the permission.
2350 # the permission.
2350 PERM_WEIGHTS = {
2351 PERM_WEIGHTS = {
2351 'repository.none': 0,
2352 'repository.none': 0,
2352 'repository.read': 1,
2353 'repository.read': 1,
2353 'repository.write': 3,
2354 'repository.write': 3,
2354 'repository.admin': 4,
2355 'repository.admin': 4,
2355
2356
2356 'group.none': 0,
2357 'group.none': 0,
2357 'group.read': 1,
2358 'group.read': 1,
2358 'group.write': 3,
2359 'group.write': 3,
2359 'group.admin': 4,
2360 'group.admin': 4,
2360
2361
2361 'usergroup.none': 0,
2362 'usergroup.none': 0,
2362 'usergroup.read': 1,
2363 'usergroup.read': 1,
2363 'usergroup.write': 3,
2364 'usergroup.write': 3,
2364 'usergroup.admin': 4,
2365 'usergroup.admin': 4,
2365
2366
2366 'hg.repogroup.create.false': 0,
2367 'hg.repogroup.create.false': 0,
2367 'hg.repogroup.create.true': 1,
2368 'hg.repogroup.create.true': 1,
2368
2369
2369 'hg.usergroup.create.false': 0,
2370 'hg.usergroup.create.false': 0,
2370 'hg.usergroup.create.true': 1,
2371 'hg.usergroup.create.true': 1,
2371
2372
2372 'hg.fork.none': 0,
2373 'hg.fork.none': 0,
2373 'hg.fork.repository': 1,
2374 'hg.fork.repository': 1,
2374 'hg.create.none': 0,
2375 'hg.create.none': 0,
2375 'hg.create.repository': 1
2376 'hg.create.repository': 1
2376 }
2377 }
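# Sketch: the weights above can be used to pick the strongest of several
# permissions of the same kind (plain Python, no database access needed):
#
#   candidates = ['repository.read', 'repository.write']
#   max(candidates, key=Permission.PERM_WEIGHTS.get)   # -> 'repository.write'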
2377
2378
2378 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2379 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2379 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2380 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2380 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2381 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2381
2382
2382 def __unicode__(self):
2383 def __unicode__(self):
2383 return u"<%s('%s:%s')>" % (
2384 return u"<%s('%s:%s')>" % (
2384 self.__class__.__name__, self.permission_id, self.permission_name
2385 self.__class__.__name__, self.permission_id, self.permission_name
2385 )
2386 )
2386
2387
2387 @classmethod
2388 @classmethod
2388 def get_by_key(cls, key):
2389 def get_by_key(cls, key):
2389 return cls.query().filter(cls.permission_name == key).scalar()
2390 return cls.query().filter(cls.permission_name == key).scalar()
2390
2391
2391 @classmethod
2392 @classmethod
2392 def get_default_repo_perms(cls, user_id, repo_id=None):
2393 def get_default_repo_perms(cls, user_id, repo_id=None):
2393 q = Session().query(UserRepoToPerm, Repository, Permission)\
2394 q = Session().query(UserRepoToPerm, Repository, Permission)\
2394 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2395 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2395 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2396 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2396 .filter(UserRepoToPerm.user_id == user_id)
2397 .filter(UserRepoToPerm.user_id == user_id)
2397 if repo_id:
2398 if repo_id:
2398 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2399 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2399 return q.all()
2400 return q.all()
2400
2401
2401 @classmethod
2402 @classmethod
2402 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2403 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2403 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2404 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2404 .join(
2405 .join(
2405 Permission,
2406 Permission,
2406 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2407 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2407 .join(
2408 .join(
2408 Repository,
2409 Repository,
2409 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2410 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2410 .join(
2411 .join(
2411 UserGroup,
2412 UserGroup,
2412 UserGroupRepoToPerm.users_group_id ==
2413 UserGroupRepoToPerm.users_group_id ==
2413 UserGroup.users_group_id)\
2414 UserGroup.users_group_id)\
2414 .join(
2415 .join(
2415 UserGroupMember,
2416 UserGroupMember,
2416 UserGroupRepoToPerm.users_group_id ==
2417 UserGroupRepoToPerm.users_group_id ==
2417 UserGroupMember.users_group_id)\
2418 UserGroupMember.users_group_id)\
2418 .filter(
2419 .filter(
2419 UserGroupMember.user_id == user_id,
2420 UserGroupMember.user_id == user_id,
2420 UserGroup.users_group_active == true())
2421 UserGroup.users_group_active == true())
2421 if repo_id:
2422 if repo_id:
2422 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2423 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2423 return q.all()
2424 return q.all()
2424
2425
2425 @classmethod
2426 @classmethod
2426 def get_default_group_perms(cls, user_id, repo_group_id=None):
2427 def get_default_group_perms(cls, user_id, repo_group_id=None):
2427 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2428 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2428 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2429 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2429 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2430 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2430 .filter(UserRepoGroupToPerm.user_id == user_id)
2431 .filter(UserRepoGroupToPerm.user_id == user_id)
2431 if repo_group_id:
2432 if repo_group_id:
2432 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2433 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2433 return q.all()
2434 return q.all()
2434
2435
2435 @classmethod
2436 @classmethod
2436 def get_default_group_perms_from_user_group(
2437 def get_default_group_perms_from_user_group(
2437 cls, user_id, repo_group_id=None):
2438 cls, user_id, repo_group_id=None):
2438 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2439 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2439 .join(
2440 .join(
2440 Permission,
2441 Permission,
2441 UserGroupRepoGroupToPerm.permission_id ==
2442 UserGroupRepoGroupToPerm.permission_id ==
2442 Permission.permission_id)\
2443 Permission.permission_id)\
2443 .join(
2444 .join(
2444 RepoGroup,
2445 RepoGroup,
2445 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2446 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2446 .join(
2447 .join(
2447 UserGroup,
2448 UserGroup,
2448 UserGroupRepoGroupToPerm.users_group_id ==
2449 UserGroupRepoGroupToPerm.users_group_id ==
2449 UserGroup.users_group_id)\
2450 UserGroup.users_group_id)\
2450 .join(
2451 .join(
2451 UserGroupMember,
2452 UserGroupMember,
2452 UserGroupRepoGroupToPerm.users_group_id ==
2453 UserGroupRepoGroupToPerm.users_group_id ==
2453 UserGroupMember.users_group_id)\
2454 UserGroupMember.users_group_id)\
2454 .filter(
2455 .filter(
2455 UserGroupMember.user_id == user_id,
2456 UserGroupMember.user_id == user_id,
2456 UserGroup.users_group_active == true())
2457 UserGroup.users_group_active == true())
2457 if repo_group_id:
2458 if repo_group_id:
2458 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2459 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2459 return q.all()
2460 return q.all()
2460
2461
2461 @classmethod
2462 @classmethod
2462 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2463 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2463 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2464 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2464 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2465 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2465 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2466 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2466 .filter(UserUserGroupToPerm.user_id == user_id)
2467 .filter(UserUserGroupToPerm.user_id == user_id)
2467 if user_group_id:
2468 if user_group_id:
2468 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2469 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2469 return q.all()
2470 return q.all()
2470
2471
2471 @classmethod
2472 @classmethod
2472 def get_default_user_group_perms_from_user_group(
2473 def get_default_user_group_perms_from_user_group(
2473 cls, user_id, user_group_id=None):
2474 cls, user_id, user_group_id=None):
2474 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2475 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2475 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2476 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2476 .join(
2477 .join(
2477 Permission,
2478 Permission,
2478 UserGroupUserGroupToPerm.permission_id ==
2479 UserGroupUserGroupToPerm.permission_id ==
2479 Permission.permission_id)\
2480 Permission.permission_id)\
2480 .join(
2481 .join(
2481 TargetUserGroup,
2482 TargetUserGroup,
2482 UserGroupUserGroupToPerm.target_user_group_id ==
2483 UserGroupUserGroupToPerm.target_user_group_id ==
2483 TargetUserGroup.users_group_id)\
2484 TargetUserGroup.users_group_id)\
2484 .join(
2485 .join(
2485 UserGroup,
2486 UserGroup,
2486 UserGroupUserGroupToPerm.user_group_id ==
2487 UserGroupUserGroupToPerm.user_group_id ==
2487 UserGroup.users_group_id)\
2488 UserGroup.users_group_id)\
2488 .join(
2489 .join(
2489 UserGroupMember,
2490 UserGroupMember,
2490 UserGroupUserGroupToPerm.user_group_id ==
2491 UserGroupUserGroupToPerm.user_group_id ==
2491 UserGroupMember.users_group_id)\
2492 UserGroupMember.users_group_id)\
2492 .filter(
2493 .filter(
2493 UserGroupMember.user_id == user_id,
2494 UserGroupMember.user_id == user_id,
2494 UserGroup.users_group_active == true())
2495 UserGroup.users_group_active == true())
2495 if user_group_id:
2496 if user_group_id:
2496 q = q.filter(
2497 q = q.filter(
2497 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2498 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2498
2499
2499 return q.all()
2500 return q.all()
2500
2501
2501
2502
2502 class UserRepoToPerm(Base, BaseModel):
2503 class UserRepoToPerm(Base, BaseModel):
2503 __tablename__ = 'repo_to_perm'
2504 __tablename__ = 'repo_to_perm'
2504 __table_args__ = (
2505 __table_args__ = (
2505 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2506 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2506 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2507 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2507 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2508 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2508 )
2509 )
2509 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2510 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2510 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2511 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2511 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2512 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2512 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2513 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2513
2514
2514 user = relationship('User')
2515 user = relationship('User')
2515 repository = relationship('Repository')
2516 repository = relationship('Repository')
2516 permission = relationship('Permission')
2517 permission = relationship('Permission')
2517
2518
2518 @classmethod
2519 @classmethod
2519 def create(cls, user, repository, permission):
2520 def create(cls, user, repository, permission):
2520 n = cls()
2521 n = cls()
2521 n.user = user
2522 n.user = user
2522 n.repository = repository
2523 n.repository = repository
2523 n.permission = permission
2524 n.permission = permission
2524 Session().add(n)
2525 Session().add(n)
2525 return n
2526 return n
2526
2527
2527 def __unicode__(self):
2528 def __unicode__(self):
2528 return u'<%s => %s >' % (self.user, self.repository)
2529 return u'<%s => %s >' % (self.user, self.repository)
2529
2530
2530
2531
class UserUserGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)


class UserToPerm(Base, BaseModel):
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)


class UserGroupRepoToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    @classmethod
    def create(cls, users_group, repository, permission):
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)


class UserGroupUserGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        CheckConstraint('target_user_group_id != user_group_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)


class UserGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')


class UserRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n


class UserGroupRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)


class Statistics(Base, BaseModel):
    __tablename__ = 'statistics'
    __table_args__ = (
        UniqueConstraint('repository_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)


class UserFollowing(Base, BaseModel):
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        return cls.query().filter(cls.follows_repo_id == repo_id)


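# Illustrative usage sketch only: get_repo_followers() returns an unevaluated
# query, so callers can keep filtering, count, or materialize it. Assuming a
# loaded repository object:
#
#     followers = UserFollowing.get_repo_followers(repo.repo_id).all()
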
class CacheKey(Base, BaseModel):
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    CACHE_TYPE_ATOM = 'ATOM'
    CACHE_TYPE_RSS = 'RSS'
    CACHE_TYPE_README = 'README'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract the prefix from an existing cache key. The key can
        consist of prefix, repo_name and suffix.
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        Get the suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.html.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from the database.
        Should only be run when all instances are down and all entries
        are thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def get_cache_key(cls, repo_name, cache_type):
        """
        Generate a cache key for this process of the RhodeCode instance.
        The prefix will most likely be the process id, or an explicitly set
        instance_id from the .ini file.
        """
        import rhodecode
        prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')

        repo_as_unicode = safe_unicode(repo_name)
        key = u'{}_{}'.format(repo_as_unicode, cache_type) \
            if cache_type else repo_as_unicode

        return u'{}{}'.format(prefix, key)

    @classmethod
    def set_invalidate(cls, repo_name, delete=False):
        """
        Mark all caches of a repo as invalid in the database.
        """
        try:
            qry = Session().query(cls).filter(cls.cache_args == repo_name)
            if delete:
                log.debug('cache objects deleted for repo %s',
                          safe_str(repo_name))
                qry.delete()
            else:
                log.debug('cache objects marked as invalid for repo %s',
                          safe_str(repo_name))
                qry.update({"cache_active": False})

            Session().commit()
        except Exception:
            log.exception(
                'Cache key invalidation failed for repository %s',
                safe_str(repo_name))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def repo_context_cache(cls, compute_func, repo_name, cache_type):
        """
        @cache_region('long_term')
        def _heavy_calculation(cache_key):
            return 'result'

        cache_context = CacheKey.repo_context_cache(
            _heavy_calculation, repo_name, cache_type)

        with cache_context as context:
            context.invalidate()
            computed = context.compute()

        assert computed == 'result'
        """
        from rhodecode.lib import caches
        return caches.InvalidationContext(compute_func, repo_name, cache_type)


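# Illustrative usage sketch only: besides the context-manager flow documented
# in repo_context_cache() above, invalidation is a two-step affair -- compute
# the key for this instance, then flip (or drop) the matching rows when the
# repository changes. Assuming a repository named 'some/repo':
#
#     key = CacheKey.get_cache_key('some/repo', CacheKey.CACHE_TYPE_README)
#     entry = CacheKey.get_active_cache(key)        # CacheKey row or None
#     CacheKey.set_invalidate('some/repo')          # mark entries inactive
#     CacheKey.set_invalidate('some/repo', delete=True)  # or delete them
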
class ChangesetComment(Base, BaseModel):
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    COMMENT_OUTDATED = u'comment_outdated'

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns the users associated with this ChangesetComment, i.e. those
        who actually commented.

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    def render(self, mentions=False):
        from rhodecode.lib import helpers as h
        return h.render(self.text, renderer=self.renderer, mentions=mentions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:ChangesetComment #%s>' % self.comment_id
        else:
            return '<DB:ChangesetComment at %#x>' % id(self)


class ChangesetStatus(Base, BaseModel):
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        return ChangesetStatus.get_status_lbl(self.status)


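# Illustrative usage sketch only: STATUSES doubles as the lookup table for the
# human-readable (translated) labels, so label access goes through
# get_status_lbl() rather than indexing the list directly:
#
#     ChangesetStatus.get_status_lbl(ChangesetStatus.STATUS_APPROVED)
#     # -> translated u'Approved'
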
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    @declared_attr
    def user_id(cls):
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    source_ref = Column('org_ref', Unicode(255), nullable=False)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    target_ref = Column('other_ref', Unicode(255), nullable=False)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    merge_rev = Column('merge_rev', String(40), nullable=True)

    @hybrid_property
    def revisions(self):
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = ':'.join(val)

    @declared_attr
    def author(cls):
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        refs = self.source_ref.split(':')
        return Reference(refs[0], refs[1], refs[2])

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        refs = self.target_ref.split(':')
        return Reference(refs[0], refs[1], refs[2])


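# Illustrative sketch only: source_ref/target_ref are stored as a single
# colon-separated string which source_ref_parts/target_ref_parts unpack into a
# Reference tuple. Assuming the usual 'type:name:commit_id' encoding and that
# Reference exposes those three fields, a 'feature-x' branch ref would read:
#
#     pr.source_ref = 'branch:feature-x:deadbeefcafe'
#     pr.source_ref_parts.type       # 'branch'
#     pr.source_ref_parts.name       # 'feature-x'
#     pr.source_ref_parts.commit_id  # 'deadbeefcafe'
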
class PullRequest(Base, _PullRequestBase):
    __tablename__ = 'pull_requests'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    statuses = relationship('ChangesetStatus')
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")
    versions = relationship('PullRequestVersion',
                            cascade="all, delete, delete-orphan")

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def get_api_data(self):
        from rhodecode.model.pull_request import PullRequestModel
        pull_request = self
        merge_status = PullRequestModel().merge_status(pull_request)
        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': url('pullrequest_show', repo_name=self.target_repo.repo_name,
                       pull_request_id=self.pull_request_id,
                       qualified=True),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': {
                'status': merge_status[0],
                'message': unicode(merge_status[1]),
            },
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for reviewer, st in pull_request.reviewers_statuses()
            ]
        }

        return data

    def __json__(self):
        return {
            'revisions': self.revisions,
        }

    def calculated_review_status(self):
        # TODO: anderson: 13.05.15 Used only on templates/my_account_pullrequests.html
        # because it's tricky on how to use ChangesetStatusModel from there
        warnings.warn("Use calculated_review_status from ChangesetStatusModel", DeprecationWarning)
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        warnings.warn("Use reviewers_statuses from ChangesetStatusModel", DeprecationWarning)
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)


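# Illustrative sketch only: get_api_data() above is the plain-dict form of a
# pull request that API responses (and plausibly the marshmallow-based event
# payloads this changeset introduces) can embed. Assuming a valid id:
#
#     import json
#     payload = PullRequest.get(pull_request_id).get_api_data()
#     json.dumps(payload, default=str)  # datetime fields need a default handler
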
class PullRequestVersion(Base, _PullRequestBase):
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)


class PullRequestReviewers(Base, BaseModel):
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    def __init__(self, user=None, pull_request=None):
        self.user = user
        self.pull_request = pull_request

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)

    user = relationship('User')
    pull_request = relationship('PullRequest')


3225 class Notification(Base, BaseModel):
3225 class Notification(Base, BaseModel):
3226 __tablename__ = 'notifications'
3226 __tablename__ = 'notifications'
3227 __table_args__ = (
3227 __table_args__ = (
3228 Index('notification_type_idx', 'type'),
3228 Index('notification_type_idx', 'type'),
3229 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3229 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3230 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3230 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3231 )
3231 )
3232
3232
3233 TYPE_CHANGESET_COMMENT = u'cs_comment'
3233 TYPE_CHANGESET_COMMENT = u'cs_comment'
3234 TYPE_MESSAGE = u'message'
3234 TYPE_MESSAGE = u'message'
3235 TYPE_MENTION = u'mention'
3235 TYPE_MENTION = u'mention'
3236 TYPE_REGISTRATION = u'registration'
3236 TYPE_REGISTRATION = u'registration'
3237 TYPE_PULL_REQUEST = u'pull_request'
3237 TYPE_PULL_REQUEST = u'pull_request'
3238 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3238 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3239
3239
3240 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3240 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3241 subject = Column('subject', Unicode(512), nullable=True)
3241 subject = Column('subject', Unicode(512), nullable=True)
3242 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3242 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3243 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3243 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3244 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3244 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3245 type_ = Column('type', Unicode(255))
3245 type_ = Column('type', Unicode(255))
3246
3246
3247 created_by_user = relationship('User')
3247 created_by_user = relationship('User')
3248 notifications_to_users = relationship('UserNotification', lazy='joined',
3248 notifications_to_users = relationship('UserNotification', lazy='joined',
3249 cascade="all, delete, delete-orphan")
3249 cascade="all, delete, delete-orphan")
3250
3250
3251 @property
3251 @property
3252 def recipients(self):
3252 def recipients(self):
3253 return [x.user for x in UserNotification.query()\
3253 return [x.user for x in UserNotification.query()\
3254 .filter(UserNotification.notification == self)\
3254 .filter(UserNotification.notification == self)\
3255 .order_by(UserNotification.user_id.asc()).all()]
3255 .order_by(UserNotification.user_id.asc()).all()]
3256
3256
3257 @classmethod
3257 @classmethod
3258 def create(cls, created_by, subject, body, recipients, type_=None):
3258 def create(cls, created_by, subject, body, recipients, type_=None):
3259 if type_ is None:
3259 if type_ is None:
3260 type_ = Notification.TYPE_MESSAGE
3260 type_ = Notification.TYPE_MESSAGE
3261
3261
3262 notification = cls()
3262 notification = cls()
3263 notification.created_by_user = created_by
3263 notification.created_by_user = created_by
3264 notification.subject = subject
3264 notification.subject = subject
3265 notification.body = body
3265 notification.body = body
3266 notification.type_ = type_
3266 notification.type_ = type_
3267 notification.created_on = datetime.datetime.now()
3267 notification.created_on = datetime.datetime.now()
3268
3268
3269 for u in recipients:
3269 for u in recipients:
3270 assoc = UserNotification()
3270 assoc = UserNotification()
3271 assoc.notification = notification
3271 assoc.notification = notification
3272
3272
3273 # if created_by is among the recipients, mark their own copy of
3274 # the notification as read
3275 if u.user_id == created_by.user_id:
3276 assoc.read = True
3277
3278 u.notifications.append(assoc)
3279 Session().add(notification)
3280
3281 return notification
3282
3283 @property
3284 def description(self):
3285 from rhodecode.model.notification import NotificationModel
3286 return NotificationModel().make_description(self)
3287
3288
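A minimal usage sketch of the create() classmethod above, assuming an open database session and two existing users (usernames and subject text are illustrative only):

from rhodecode.model.db import Notification, User
from rhodecode.model.meta import Session

# look up two existing users (get_by_username is assumed to exist on User)
admin = User.get_by_username('admin')
alice = User.get_by_username('alice')

notification = Notification.create(
    created_by=admin,
    subject=u'Nightly build finished',
    body=u'The nightly build completed successfully.',
    recipients=[admin, alice],
    type_=Notification.TYPE_MESSAGE)

# admin is both creator and recipient, so the creator's copy is created
# already read; alice's UserNotification row starts with read=False
Session().commit()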
3289 class UserNotification(Base, BaseModel):
3290 __tablename__ = 'user_to_notification'
3291 __table_args__ = (
3292 UniqueConstraint('user_id', 'notification_id'),
3293 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3294 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3295 )
3296 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3297 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3298 read = Column('read', Boolean, default=False)
3299 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3300
3301 user = relationship('User', lazy="joined")
3302 notification = relationship('Notification', lazy="joined",
3303 order_by=lambda: Notification.created_on.desc(),)
3304
3305 def mark_as_read(self):
3306 self.read = True
3307 Session().add(self)
3308
3309
3310 class Gist(Base, BaseModel):
3311 __tablename__ = 'gists'
3312 __table_args__ = (
3313 Index('g_gist_access_id_idx', 'gist_access_id'),
3314 Index('g_created_on_idx', 'created_on'),
3315 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3316 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3317 )
3318 GIST_PUBLIC = u'public'
3319 GIST_PRIVATE = u'private'
3320 DEFAULT_FILENAME = u'gistfile1.txt'
3321
3322 ACL_LEVEL_PUBLIC = u'acl_public'
3323 ACL_LEVEL_PRIVATE = u'acl_private'
3324
3325 gist_id = Column('gist_id', Integer(), primary_key=True)
3326 gist_access_id = Column('gist_access_id', Unicode(250))
3327 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3328 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3329 gist_expires = Column('gist_expires', Float(53), nullable=False)
3330 gist_type = Column('gist_type', Unicode(128), nullable=False)
3331 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3332 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3333 acl_level = Column('acl_level', Unicode(128), nullable=True)
3334
3335 owner = relationship('User')
3336
3337 def __repr__(self):
3338 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3339
3340 @classmethod
3341 def get_or_404(cls, id_):
3342 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3343 if not res:
3344 raise HTTPNotFound
3345 return res
3346
3347 @classmethod
3348 def get_by_access_id(cls, gist_access_id):
3349 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3350
3351 def gist_url(self):
3352 import rhodecode
3353 alias_url = rhodecode.CONFIG.get('gist_alias_url')
3354 if alias_url:
3355 return alias_url.replace('{gistid}', self.gist_access_id)
3356
3357 return url('gist', gist_id=self.gist_access_id, qualified=True)
3358
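A small sketch of how the '{gistid}' placeholder in gist_alias_url is expanded by gist_url() above; the URL and access id are made-up values:

# e.g. the value of rhodecode.CONFIG.get('gist_alias_url')
alias_url = 'https://gist.example.com/{gistid}'
gist_access_id = 'a1b2c3d4'

print(alias_url.replace('{gistid}', gist_access_id))
# -> https://gist.example.com/a1b2c3d4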
3359 @classmethod
3360 def base_path(cls):
3361 """
3362 Returns the base path where all gists are stored
3363
3364 :param cls:
3365 """
3366 from rhodecode.model.gist import GIST_STORE_LOC
3367 q = Session().query(RhodeCodeUi)\
3368 .filter(RhodeCodeUi.ui_key == URL_SEP)
3369 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3370 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3371
3372 def get_api_data(self):
3373 """
3374 Common function for generating gist related data for API
3375 """
3376 gist = self
3377 data = {
3378 'gist_id': gist.gist_id,
3379 'type': gist.gist_type,
3380 'access_id': gist.gist_access_id,
3381 'description': gist.gist_description,
3382 'url': gist.gist_url(),
3383 'expires': gist.gist_expires,
3384 'created_on': gist.created_on,
3385 'modified_at': gist.modified_at,
3386 'content': None,
3387 'acl_level': gist.acl_level,
3388 }
3389 return data
3390
3391 def __json__(self):
3392 data = dict(
3393 )
3394 data.update(self.get_api_data())
3395 return data
3396 # SCM functions
3397
3398 def scm_instance(self, **kwargs):
3399 from rhodecode.lib.vcs import get_repo
3400 base_path = self.base_path()
3401 return get_repo(os.path.join(*map(safe_str,
3402 [base_path, self.gist_access_id])))
3403
3404
3405 class DbMigrateVersion(Base, BaseModel):
3406 __tablename__ = 'db_migrate_version'
3407 __table_args__ = (
3408 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3409 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3410 )
3411 repository_id = Column('repository_id', String(250), primary_key=True)
3412 repository_path = Column('repository_path', Text)
3413 version = Column('version', Integer)
3414
3415
3416 class ExternalIdentity(Base, BaseModel):
3417 __tablename__ = 'external_identities'
3418 __table_args__ = (
3419 Index('local_user_id_idx', 'local_user_id'),
3420 Index('external_id_idx', 'external_id'),
3421 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3422 'mysql_charset': 'utf8'})
3423
3424 external_id = Column('external_id', Unicode(255), default=u'',
3425 primary_key=True)
3426 external_username = Column('external_username', Unicode(1024), default=u'')
3427 local_user_id = Column('local_user_id', Integer(),
3428 ForeignKey('users.user_id'), primary_key=True)
3429 provider_name = Column('provider_name', Unicode(255), default=u'',
3430 primary_key=True)
3431 access_token = Column('access_token', String(1024), default=u'')
3432 alt_token = Column('alt_token', String(1024), default=u'')
3433 token_secret = Column('token_secret', String(1024), default=u'')
3434
3435 @classmethod
3436 def by_external_id_and_provider(cls, external_id, provider_name,
3437 local_user_id=None):
3438 """
3439 Returns ExternalIdentity instance based on search params
3440
3441 :param external_id:
3442 :param provider_name:
3443 :return: ExternalIdentity
3444 """
3445 query = cls.query()
3446 query = query.filter(cls.external_id == external_id)
3447 query = query.filter(cls.provider_name == provider_name)
3448 if local_user_id:
3449 query = query.filter(cls.local_user_id == local_user_id)
3450 return query.first()
3451
3452 @classmethod
3453 def user_by_external_id_and_provider(cls, external_id, provider_name):
3454 """
3455 Returns User instance based on search params
3456
3457 :param external_id:
3458 :param provider_name:
3459 :return: User
3460 """
3461 query = User.query()
3462 query = query.filter(cls.external_id == external_id)
3463 query = query.filter(cls.provider_name == provider_name)
3464 query = query.filter(User.user_id == cls.local_user_id)
3465 return query.first()
3466
3467 @classmethod
3468 def by_local_user_id(cls, local_user_id):
3469 """
3470 Returns all tokens for user
3471
3472 :param local_user_id:
3473 :return: ExternalIdentity
3474 """
3475 query = cls.query()
3476 query = query.filter(cls.local_user_id == local_user_id)
3477 return query
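Illustrative lookups using the ExternalIdentity classmethods above; the provider name and ids are placeholders:

from rhodecode.model.db import ExternalIdentity

# identity row for a given provider account, optionally scoped to one local user
identity = ExternalIdentity.by_external_id_and_provider(
    external_id=u'12345', provider_name=u'github')

# the local User mapped to that provider account, or None if unmapped
user = ExternalIdentity.user_by_external_id_and_provider(
    external_id=u'12345', provider_name=u'github')

# query of all external identities (tokens) attached to a local user id
tokens = ExternalIdentity.by_local_user_id(local_user_id=2).all()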
@@ -1,1148 +1,1153 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21
22 """
23 pull request model for RhodeCode
24 """
25
26 from collections import namedtuple
27 import json
28 import logging
29 import datetime
30
31 from pylons.i18n.translation import _
32 from pylons.i18n.translation import lazy_ugettext
33
34 import rhodecode
35 from rhodecode.lib import helpers as h, hooks_utils, diffs
36 from rhodecode.lib.compat import OrderedDict
37 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
38 from rhodecode.lib.markup_renderer import (
39 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
40 from rhodecode.lib.utils import action_logger
41 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
42 from rhodecode.lib.vcs.backends.base import (
43 Reference, MergeResponse, MergeFailureReason)
44 from rhodecode.lib.vcs.exceptions import (
45 CommitDoesNotExistError, EmptyRepositoryError)
46 from rhodecode.model import BaseModel
47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 from rhodecode.model.comment import ChangesetCommentsModel
49 from rhodecode.model.db import (
50 PullRequest, PullRequestReviewers, Notification, ChangesetStatus,
51 PullRequestVersion, ChangesetComment)
52 from rhodecode.model.meta import Session
53 from rhodecode.model.notification import NotificationModel, \
54 EmailNotificationModel
55 from rhodecode.model.scm import ScmModel
56 from rhodecode.model.settings import VcsSettingsModel
57
58
59 log = logging.getLogger(__name__)
60
61
62 class PullRequestModel(BaseModel):
63
64 cls = PullRequest
65
66 DIFF_CONTEXT = 3
67
68 MERGE_STATUS_MESSAGES = {
69 MergeFailureReason.NONE: lazy_ugettext(
70 'This pull request can be automatically merged.'),
71 MergeFailureReason.UNKNOWN: lazy_ugettext(
72 'This pull request cannot be merged because of an unhandled'
73 ' exception.'),
74 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
75 'This pull request cannot be merged because of conflicts.'),
76 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
77 'This pull request could not be merged because push to target'
78 ' failed.'),
79 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
80 'This pull request cannot be merged because the target is not a'
81 ' head.'),
82 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
83 'This pull request cannot be merged because the source contains'
84 ' more branches than the target.'),
85 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
86 'This pull request cannot be merged because the target has'
87 ' multiple heads.'),
88 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
89 'This pull request cannot be merged because the target repository'
90 ' is locked.'),
91 MergeFailureReason.MISSING_COMMIT: lazy_ugettext(
92 'This pull request cannot be merged because the target or the '
93 'source reference is missing.'),
94 }
95
96 def __get_pull_request(self, pull_request):
97 return self._get_instance(PullRequest, pull_request)
98
99 def _check_perms(self, perms, pull_request, user, api=False):
100 if not api:
101 return h.HasRepoPermissionAny(*perms)(
102 user=user, repo_name=pull_request.target_repo.repo_name)
103 else:
104 return h.HasRepoPermissionAnyApi(*perms)(
105 user=user, repo_name=pull_request.target_repo.repo_name)
106
107 def check_user_read(self, pull_request, user, api=False):
108 _perms = ('repository.admin', 'repository.write', 'repository.read',)
109 return self._check_perms(_perms, pull_request, user, api)
110
111 def check_user_merge(self, pull_request, user, api=False):
112 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
113 return self._check_perms(_perms, pull_request, user, api)
114
115 def check_user_update(self, pull_request, user, api=False):
116 owner = user.user_id == pull_request.user_id
117 return self.check_user_merge(pull_request, user, api) or owner
118
119 def check_user_change_status(self, pull_request, user, api=False):
120 reviewer = user.user_id in [x.user_id for x in
121 pull_request.reviewers]
122 return self.check_user_update(pull_request, user, api) or reviewer
123
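A short sketch of how the permission helpers above compose; pull_request and user stand for a PullRequest instance and an authenticated user object, and the import path is assumed:

from rhodecode.model.pull_request import PullRequestModel  # assumed module path

def describe_permissions(pull_request, user):
    model = PullRequestModel()
    return {
        'read': model.check_user_read(pull_request, user),
        'merge': model.check_user_merge(pull_request, user),
        # update rights: merge-level permissions or being the pull request author
        'update': model.check_user_update(pull_request, user),
        # status changes: update rights or being listed as a reviewer
        'change_status': model.check_user_change_status(pull_request, user),
    }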
124 def get(self, pull_request):
125 return self.__get_pull_request(pull_request)
126
127 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
128 opened_by=None, order_by=None,
129 order_dir='desc'):
130 repo = self._get_repo(repo_name)
131 q = PullRequest.query()
132 # source or target
133 if source:
134 q = q.filter(PullRequest.source_repo == repo)
135 else:
136 q = q.filter(PullRequest.target_repo == repo)
137
138 # closed,opened
139 if statuses:
140 q = q.filter(PullRequest.status.in_(statuses))
141
142 # opened by filter
143 if opened_by:
144 q = q.filter(PullRequest.user_id.in_(opened_by))
145
146 if order_by:
147 order_map = {
148 'name_raw': PullRequest.pull_request_id,
149 'title': PullRequest.title,
150 'updated_on_raw': PullRequest.updated_on
151 }
152 if order_dir == 'asc':
153 q = q.order_by(order_map[order_by].asc())
154 else:
155 q = q.order_by(order_map[order_by].desc())
156
157 return q
158
159 def count_all(self, repo_name, source=False, statuses=None,
160 opened_by=None):
161 """
162 Count the number of pull requests for a specific repository.
163
164 :param repo_name: target or source repo
165 :param source: boolean flag to specify if repo_name refers to source
166 :param statuses: list of pull request statuses
167 :param opened_by: author user of the pull request
168 :returns: int number of pull requests
169 """
170 q = self._prepare_get_all_query(
171 repo_name, source=source, statuses=statuses, opened_by=opened_by)
172
173 return q.count()
174
175 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
176 offset=0, length=None, order_by=None, order_dir='desc'):
177 """
178 Get all pull requests for a specific repository.
179
180 :param repo_name: target or source repo
181 :param source: boolean flag to specify if repo_name refers to source
182 :param statuses: list of pull request statuses
183 :param opened_by: author user of the pull request
184 :param offset: pagination offset
185 :param length: length of returned list
186 :param order_by: order of the returned list
187 :param order_dir: 'asc' or 'desc' ordering direction
188 :returns: list of pull requests
189 """
190 q = self._prepare_get_all_query(
191 repo_name, source=source, statuses=statuses, opened_by=opened_by,
192 order_by=order_by, order_dir=order_dir)
193
194 if length:
195 pull_requests = q.limit(length).offset(offset).all()
196 else:
197 pull_requests = q.all()
198
199 return pull_requests
200
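Usage sketch for the listing helpers above; the repository name, status strings and paging values are illustrative, and order_by must be one of the keys in order_map:

from rhodecode.model.pull_request import PullRequestModel  # assumed module path

model = PullRequestModel()

# first page of open pull requests targeting the repo, newest activity first
prs = model.get_all(
    'acme/backend', statuses=[u'new', u'open'],
    offset=0, length=20, order_by='updated_on_raw', order_dir='desc')

# total count with the same filters, useful for building pagination
total = model.count_all('acme/backend', statuses=[u'new', u'open'])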
201 def count_awaiting_review(self, repo_name, source=False, statuses=None,
202 opened_by=None):
203 """
204 Count the number of pull requests for a specific repository that are
205 awaiting review.
206
207 :param repo_name: target or source repo
208 :param source: boolean flag to specify if repo_name refers to source
209 :param statuses: list of pull request statuses
210 :param opened_by: author user of the pull request
211 :returns: int number of pull requests
212 """
213 pull_requests = self.get_awaiting_review(
214 repo_name, source=source, statuses=statuses, opened_by=opened_by)
215
216 return len(pull_requests)
217
218 def get_awaiting_review(self, repo_name, source=False, statuses=None,
219 opened_by=None, offset=0, length=None,
220 order_by=None, order_dir='desc'):
221 """
222 Get all pull requests for a specific repository that are awaiting
223 review.
224
225 :param repo_name: target or source repo
226 :param source: boolean flag to specify if repo_name refers to source
227 :param statuses: list of pull request statuses
228 :param opened_by: author user of the pull request
229 :param offset: pagination offset
230 :param length: length of returned list
231 :param order_by: order of the returned list
232 :param order_dir: 'asc' or 'desc' ordering direction
233 :returns: list of pull requests
234 """
235 pull_requests = self.get_all(
236 repo_name, source=source, statuses=statuses, opened_by=opened_by,
237 order_by=order_by, order_dir=order_dir)
238
239 _filtered_pull_requests = []
240 for pr in pull_requests:
241 status = pr.calculated_review_status()
242 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
243 ChangesetStatus.STATUS_UNDER_REVIEW]:
244 _filtered_pull_requests.append(pr)
245 if length:
246 return _filtered_pull_requests[offset:offset+length]
247 else:
248 return _filtered_pull_requests
249
250 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
251 opened_by=None, user_id=None):
252 """
253 Count the number of pull requests for a specific repository that are
254 awaiting review from a specific user.
255
256 :param repo_name: target or source repo
257 :param source: boolean flag to specify if repo_name refers to source
258 :param statuses: list of pull request statuses
259 :param opened_by: author user of the pull request
260 :param user_id: reviewer user of the pull request
261 :returns: int number of pull requests
262 """
263 pull_requests = self.get_awaiting_my_review(
264 repo_name, source=source, statuses=statuses, opened_by=opened_by,
265 user_id=user_id)
266
267 return len(pull_requests)
268
269 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
270 opened_by=None, user_id=None, offset=0,
271 length=None, order_by=None, order_dir='desc'):
272 """
273 Get all pull requests for a specific repository that are awaiting
274 review from a specific user.
275
276 :param repo_name: target or source repo
277 :param source: boolean flag to specify if repo_name refers to source
278 :param statuses: list of pull request statuses
279 :param opened_by: author user of the pull request
280 :param user_id: reviewer user of the pull request
281 :param offset: pagination offset
282 :param length: length of returned list
283 :param order_by: order of the returned list
284 :param order_dir: 'asc' or 'desc' ordering direction
285 :returns: list of pull requests
286 """
287 pull_requests = self.get_all(
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 order_by=order_by, order_dir=order_dir)
290
291 _my = PullRequestModel().get_not_reviewed(user_id)
292 my_participation = []
293 for pr in pull_requests:
294 if pr in _my:
295 my_participation.append(pr)
296 _filtered_pull_requests = my_participation
297 if length:
298 return _filtered_pull_requests[offset:offset+length]
299 else:
300 return _filtered_pull_requests
301
302 def get_not_reviewed(self, user_id):
303 return [
304 x.pull_request for x in PullRequestReviewers.query().filter(
305 PullRequestReviewers.user_id == user_id).all()
306 ]
307
308 def get_versions(self, pull_request):
309 """
310 returns versions of this pull request, sorted by version ID ascending
311 """
312 return PullRequestVersion.query()\
313 .filter(PullRequestVersion.pull_request == pull_request)\
314 .order_by(PullRequestVersion.pull_request_version_id.asc())\
315 .all()
316
317 def create(self, created_by, source_repo, source_ref, target_repo,
318 target_ref, revisions, reviewers, title, description=None):
319 created_by_user = self._get_user(created_by)
320 source_repo = self._get_repo(source_repo)
321 target_repo = self._get_repo(target_repo)
322
323 pull_request = PullRequest()
324 pull_request.source_repo = source_repo
325 pull_request.source_ref = source_ref
326 pull_request.target_repo = target_repo
327 pull_request.target_ref = target_ref
328 pull_request.revisions = revisions
329 pull_request.title = title
330 pull_request.description = description
331 pull_request.author = created_by_user
332
333 Session().add(pull_request)
334 Session().flush()
335
336 # members / reviewers
337 for user_id in set(reviewers):
338 user = self._get_user(user_id)
339 reviewer = PullRequestReviewers(user, pull_request)
340 Session().add(reviewer)
341
342 # Set approval status to "Under Review" for all commits which are
343 # part of this pull request.
344 ChangesetStatusModel().set_status(
345 repo=target_repo,
346 status=ChangesetStatus.STATUS_UNDER_REVIEW,
347 user=created_by_user,
348 pull_request=pull_request
349 )
350
351 self.notify_reviewers(pull_request, reviewers)
352 self._trigger_pull_request_hook(
353 pull_request, created_by_user, 'create')
354
355 return pull_request
356
357 def _trigger_pull_request_hook(self, pull_request, user, action):
358 pull_request = self.__get_pull_request(pull_request)
359 target_scm = pull_request.target_repo.scm_instance()
360 if action == 'create':
361 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
362 elif action == 'merge':
363 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
364 elif action == 'close':
365 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
366 elif action == 'review_status_change':
367 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
368 elif action == 'update':
369 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
370 else:
371 return
372
373 trigger_hook(
374 username=user.username,
375 repo_name=pull_request.target_repo.repo_name,
376 repo_alias=target_scm.alias,
377 pull_request=pull_request)
378
379 def _get_commit_ids(self, pull_request):
380 """
381 Return the commit ids of the merged pull request.
382
383 This method does not yet deal correctly with the lack of autoupdates
384 or with implicit target updates.
385 For example: if a commit in the source repo is already in the target, it
386 will be reported anyway.
387 """
388 merge_rev = pull_request.merge_rev
389 if merge_rev is None:
390 raise ValueError('This pull request was not merged yet')
391
392 commit_ids = list(pull_request.revisions)
393 if merge_rev not in commit_ids:
394 commit_ids.append(merge_rev)
395
396 return commit_ids
397
398 def merge(self, pull_request, user, extras):
399 log.debug("Merging pull request %s", pull_request.pull_request_id)
400 merge_state = self._merge_pull_request(pull_request, user, extras)
401 if merge_state.executed:
402 log.debug(
403 "Merge was successful, updating the pull request comments.")
404 self._comment_and_close_pr(pull_request, user, merge_state)
405 self._log_action('user_merged_pull_request', user, pull_request)
406 else:
407 log.warn("Merge failed, not updating the pull request.")
408 return merge_state
409
410 def _merge_pull_request(self, pull_request, user, extras):
411 target_vcs = pull_request.target_repo.scm_instance()
412 source_vcs = pull_request.source_repo.scm_instance()
413 target_ref = self._refresh_reference(
414 pull_request.target_ref_parts, target_vcs)
415
416 message = _(
417 'Merge pull request #%(pr_id)s from '
418 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
419 'pr_id': pull_request.pull_request_id,
420 'source_repo': source_vcs.name,
421 'source_ref_name': pull_request.source_ref_parts.name,
422 'pr_title': pull_request.title
423 }
424
425 workspace_id = self._workspace_id(pull_request)
426 protocol = rhodecode.CONFIG.get('vcs.hooks.protocol')
427 use_direct_calls = rhodecode.CONFIG.get('vcs.hooks.direct_calls')
428 use_rebase = self._use_rebase_for_merging(pull_request)
429
430 callback_daemon, extras = prepare_callback_daemon(
431 extras, protocol=protocol, use_direct_calls=use_direct_calls)
432
433 with callback_daemon:
434 # TODO: johbo: Implement a clean way to run a config_override
435 # for a single call.
436 target_vcs.config.set(
437 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
438 merge_state = target_vcs.merge(
439 target_ref, source_vcs, pull_request.source_ref_parts,
440 workspace_id, user_name=user.username,
441 user_email=user.email, message=message, use_rebase=use_rebase)
442 return merge_state
443
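For illustration only, the merge message template above renders roughly like this (all values are made up):

message = (
    'Merge pull request #%(pr_id)s from '
    '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
    'pr_id': 42,
    'source_repo': 'acme/backend-fork',
    'source_ref_name': 'feature/login',
    'pr_title': 'Add login form',
}
# message ==
# 'Merge pull request #42 from acme/backend-fork feature/login\n\n Add login form'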
444 def _comment_and_close_pr(self, pull_request, user, merge_state):
444 def _comment_and_close_pr(self, pull_request, user, merge_state):
445 pull_request.merge_rev = merge_state.merge_commit_id
445 pull_request.merge_rev = merge_state.merge_commit_id
446 pull_request.updated_on = datetime.datetime.now()
446 pull_request.updated_on = datetime.datetime.now()
447
447
448 ChangesetCommentsModel().create(
448 ChangesetCommentsModel().create(
449 text=unicode(_('Pull request merged and closed')),
449 text=unicode(_('Pull request merged and closed')),
450 repo=pull_request.target_repo.repo_id,
450 repo=pull_request.target_repo.repo_id,
451 user=user.user_id,
451 user=user.user_id,
452 pull_request=pull_request.pull_request_id,
452 pull_request=pull_request.pull_request_id,
453 f_path=None,
453 f_path=None,
454 line_no=None,
454 line_no=None,
455 closing_pr=True
455 closing_pr=True
456 )
456 )
457
457
458 Session().add(pull_request)
458 Session().add(pull_request)
459 Session().flush()
459 Session().flush()
460 # TODO: paris: replace invalidation with less radical solution
460 # TODO: paris: replace invalidation with less radical solution
461 ScmModel().mark_for_invalidation(
461 ScmModel().mark_for_invalidation(
462 pull_request.target_repo.repo_name)
462 pull_request.target_repo.repo_name)
463 self._trigger_pull_request_hook(pull_request, user, 'merge')
463 self._trigger_pull_request_hook(pull_request, user, 'merge')
464
464
465 def has_valid_update_type(self, pull_request):
465 def has_valid_update_type(self, pull_request):
466 source_ref_type = pull_request.source_ref_parts.type
466 source_ref_type = pull_request.source_ref_parts.type
467 return source_ref_type in ['book', 'branch', 'tag']
467 return source_ref_type in ['book', 'branch', 'tag']
468
468
469 def update_commits(self, pull_request):
469 def update_commits(self, pull_request):
470 """
470 """
471 Get the updated list of commits for the pull request
471 Get the updated list of commits for the pull request
472 and return the new pull request version and the list
472 and return the new pull request version and the list
473 of commits processed by this update action
473 of commits processed by this update action
474 """
474 """
475
475
476 pull_request = self.__get_pull_request(pull_request)
476 pull_request = self.__get_pull_request(pull_request)
477 source_ref_type = pull_request.source_ref_parts.type
477 source_ref_type = pull_request.source_ref_parts.type
478 source_ref_name = pull_request.source_ref_parts.name
478 source_ref_name = pull_request.source_ref_parts.name
479 source_ref_id = pull_request.source_ref_parts.commit_id
479 source_ref_id = pull_request.source_ref_parts.commit_id
480
480
481 if not self.has_valid_update_type(pull_request):
481 if not self.has_valid_update_type(pull_request):
482 log.debug(
482 log.debug(
483 "Skipping update of pull request %s due to ref type: %s",
483 "Skipping update of pull request %s due to ref type: %s",
484 pull_request, source_ref_type)
484 pull_request, source_ref_type)
485 return (None, None)
485 return (None, None)
486
486
487 source_repo = pull_request.source_repo.scm_instance()
487 source_repo = pull_request.source_repo.scm_instance()
488 source_commit = source_repo.get_commit(commit_id=source_ref_name)
488 source_commit = source_repo.get_commit(commit_id=source_ref_name)
489 if source_ref_id == source_commit.raw_id:
489 if source_ref_id == source_commit.raw_id:
490 log.debug("Nothing changed in pull request %s", pull_request)
490 log.debug("Nothing changed in pull request %s", pull_request)
491 return (None, None)
491 return (None, None)
492
492
493 # Finally there is a need for an update
493 # Finally there is a need for an update
494 pull_request_version = self._create_version_from_snapshot(pull_request)
494 pull_request_version = self._create_version_from_snapshot(pull_request)
495 self._link_comments_to_version(pull_request_version)
495 self._link_comments_to_version(pull_request_version)
496
496
497 target_ref_type = pull_request.target_ref_parts.type
497 target_ref_type = pull_request.target_ref_parts.type
498 target_ref_name = pull_request.target_ref_parts.name
498 target_ref_name = pull_request.target_ref_parts.name
499 target_ref_id = pull_request.target_ref_parts.commit_id
499 target_ref_id = pull_request.target_ref_parts.commit_id
500 target_repo = pull_request.target_repo.scm_instance()
500 target_repo = pull_request.target_repo.scm_instance()
501
501
502 if target_ref_type in ('tag', 'branch', 'book'):
502 if target_ref_type in ('tag', 'branch', 'book'):
503 target_commit = target_repo.get_commit(target_ref_name)
503 target_commit = target_repo.get_commit(target_ref_name)
504 else:
504 else:
505 target_commit = target_repo.get_commit(target_ref_id)
505 target_commit = target_repo.get_commit(target_ref_id)
506
506
507 # re-compute commit ids
507 # re-compute commit ids
508 old_commit_ids = set(pull_request.revisions)
508 old_commit_ids = set(pull_request.revisions)
509 pre_load = ["author", "branch", "date", "message"]
509 pre_load = ["author", "branch", "date", "message"]
510 commit_ranges = target_repo.compare(
510 commit_ranges = target_repo.compare(
511 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
511 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
512 pre_load=pre_load)
512 pre_load=pre_load)
513
513
514 ancestor = target_repo.get_common_ancestor(
514 ancestor = target_repo.get_common_ancestor(
515 target_commit.raw_id, source_commit.raw_id, source_repo)
515 target_commit.raw_id, source_commit.raw_id, source_repo)
516
516
517 pull_request.source_ref = '%s:%s:%s' % (
517 pull_request.source_ref = '%s:%s:%s' % (
518 source_ref_type, source_ref_name, source_commit.raw_id)
518 source_ref_type, source_ref_name, source_commit.raw_id)
519 pull_request.target_ref = '%s:%s:%s' % (
519 pull_request.target_ref = '%s:%s:%s' % (
520 target_ref_type, target_ref_name, ancestor)
520 target_ref_type, target_ref_name, ancestor)
521 pull_request.revisions = [
521 pull_request.revisions = [
522 commit.raw_id for commit in reversed(commit_ranges)]
522 commit.raw_id for commit in reversed(commit_ranges)]
523 pull_request.updated_on = datetime.datetime.now()
523 pull_request.updated_on = datetime.datetime.now()
524 Session().add(pull_request)
524 Session().add(pull_request)
525 new_commit_ids = set(pull_request.revisions)
525 new_commit_ids = set(pull_request.revisions)
526
526
527 changes = self._calculate_commit_id_changes(
527 changes = self._calculate_commit_id_changes(
528 old_commit_ids, new_commit_ids)
528 old_commit_ids, new_commit_ids)
529
529
530 old_diff_data, new_diff_data = self._generate_update_diffs(
530 old_diff_data, new_diff_data = self._generate_update_diffs(
531 pull_request, pull_request_version)
531 pull_request, pull_request_version)
532
532
533 ChangesetCommentsModel().outdate_comments(
533 ChangesetCommentsModel().outdate_comments(
534 pull_request, old_diff_data=old_diff_data,
534 pull_request, old_diff_data=old_diff_data,
535 new_diff_data=new_diff_data)
535 new_diff_data=new_diff_data)
536
536
537 file_changes = self._calculate_file_changes(
537 file_changes = self._calculate_file_changes(
538 old_diff_data, new_diff_data)
538 old_diff_data, new_diff_data)
539
539
540 # Add an automatic comment to the pull request
540 # Add an automatic comment to the pull request
541 update_comment = ChangesetCommentsModel().create(
541 update_comment = ChangesetCommentsModel().create(
542 text=self._render_update_message(changes, file_changes),
542 text=self._render_update_message(changes, file_changes),
543 repo=pull_request.target_repo,
543 repo=pull_request.target_repo,
544 user=pull_request.author,
544 user=pull_request.author,
545 pull_request=pull_request,
545 pull_request=pull_request,
546 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
546 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
547
547
548 # Update status to "Under Review" for added commits
548 # Update status to "Under Review" for added commits
549 for commit_id in changes.added:
549 for commit_id in changes.added:
550 ChangesetStatusModel().set_status(
550 ChangesetStatusModel().set_status(
551 repo=pull_request.source_repo,
551 repo=pull_request.source_repo,
552 status=ChangesetStatus.STATUS_UNDER_REVIEW,
552 status=ChangesetStatus.STATUS_UNDER_REVIEW,
553 comment=update_comment,
553 comment=update_comment,
554 user=pull_request.author,
554 user=pull_request.author,
555 pull_request=pull_request,
555 pull_request=pull_request,
556 revision=commit_id)
556 revision=commit_id)
557
557
558 log.debug(
558 log.debug(
559 'Updated pull request %s, added_ids: %s, common_ids: %s, '
559 'Updated pull request %s, added_ids: %s, common_ids: %s, '
560 'removed_ids: %s', pull_request.pull_request_id,
560 'removed_ids: %s', pull_request.pull_request_id,
561 changes.added, changes.common, changes.removed)
561 changes.added, changes.common, changes.removed)
562 log.debug('Updated pull request with the following file changes: %s',
562 log.debug('Updated pull request with the following file changes: %s',
563 file_changes)
563 file_changes)
564
564
565 log.info(
565 log.info(
566 "Updated pull request %s from commit %s to commit %s, "
566 "Updated pull request %s from commit %s to commit %s, "
567 "stored new version %s of this pull request.",
567 "stored new version %s of this pull request.",
568 pull_request.pull_request_id, source_ref_id,
568 pull_request.pull_request_id, source_ref_id,
569 pull_request.source_ref_parts.commit_id,
569 pull_request.source_ref_parts.commit_id,
570 pull_request_version.pull_request_version_id)
570 pull_request_version.pull_request_version_id)
571 Session().commit()
571 Session().commit()
572 self._trigger_pull_request_hook(pull_request, pull_request.author,
572 self._trigger_pull_request_hook(pull_request, pull_request.author,
573 'update')
573 'update')
574 return (pull_request_version, changes)
574 return (pull_request_version, changes)
575
575
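The refs stored above follow a 'type:name:commit_id' convention; a minimal standalone sketch of that format (the helper name is illustrative, not part of the model):

def format_ref(ref_type, ref_name, commit_id):
    # refs are stored as 'type:name:commit_id', e.g. 'branch:default:<sha>'
    return '%s:%s:%s' % (ref_type, ref_name, commit_id)

assert format_ref('branch', 'default', 'deadbeef') == 'branch:default:deadbeef'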
576 def _create_version_from_snapshot(self, pull_request):
576 def _create_version_from_snapshot(self, pull_request):
577 version = PullRequestVersion()
577 version = PullRequestVersion()
578 version.title = pull_request.title
578 version.title = pull_request.title
579 version.description = pull_request.description
579 version.description = pull_request.description
580 version.status = pull_request.status
580 version.status = pull_request.status
581 version.created_on = pull_request.created_on
581 version.created_on = pull_request.created_on
582 version.updated_on = pull_request.updated_on
582 version.updated_on = pull_request.updated_on
583 version.user_id = pull_request.user_id
583 version.user_id = pull_request.user_id
584 version.source_repo = pull_request.source_repo
584 version.source_repo = pull_request.source_repo
585 version.source_ref = pull_request.source_ref
585 version.source_ref = pull_request.source_ref
586 version.target_repo = pull_request.target_repo
586 version.target_repo = pull_request.target_repo
587 version.target_ref = pull_request.target_ref
587 version.target_ref = pull_request.target_ref
588
588
589 version._last_merge_source_rev = pull_request._last_merge_source_rev
589 version._last_merge_source_rev = pull_request._last_merge_source_rev
590 version._last_merge_target_rev = pull_request._last_merge_target_rev
590 version._last_merge_target_rev = pull_request._last_merge_target_rev
591 version._last_merge_status = pull_request._last_merge_status
591 version._last_merge_status = pull_request._last_merge_status
592 version.merge_rev = pull_request.merge_rev
592 version.merge_rev = pull_request.merge_rev
593
593
594 version.revisions = pull_request.revisions
594 version.revisions = pull_request.revisions
595 version.pull_request = pull_request
595 version.pull_request = pull_request
596 Session().add(version)
596 Session().add(version)
597 Session().flush()
597 Session().flush()
598
598
599 return version
599 return version
600
600
601 def _generate_update_diffs(self, pull_request, pull_request_version):
601 def _generate_update_diffs(self, pull_request, pull_request_version):
602 diff_context = (
602 diff_context = (
603 self.DIFF_CONTEXT +
603 self.DIFF_CONTEXT +
604 ChangesetCommentsModel.needed_extra_diff_context())
604 ChangesetCommentsModel.needed_extra_diff_context())
605 old_diff = self._get_diff_from_pr_or_version(
605 old_diff = self._get_diff_from_pr_or_version(
606 pull_request_version, context=diff_context)
606 pull_request_version, context=diff_context)
607 new_diff = self._get_diff_from_pr_or_version(
607 new_diff = self._get_diff_from_pr_or_version(
608 pull_request, context=diff_context)
608 pull_request, context=diff_context)
609
609
610 old_diff_data = diffs.DiffProcessor(old_diff)
610 old_diff_data = diffs.DiffProcessor(old_diff)
611 old_diff_data.prepare()
611 old_diff_data.prepare()
612 new_diff_data = diffs.DiffProcessor(new_diff)
612 new_diff_data = diffs.DiffProcessor(new_diff)
613 new_diff_data.prepare()
613 new_diff_data.prepare()
614
614
615 return old_diff_data, new_diff_data
615 return old_diff_data, new_diff_data
616
616
617 def _link_comments_to_version(self, pull_request_version):
617 def _link_comments_to_version(self, pull_request_version):
618 """
618 """
619 Link all unlinked comments of this pull request to the given version.
619 Link all unlinked comments of this pull request to the given version.
620
620
621 :param pull_request_version: The `PullRequestVersion` to which
621 :param pull_request_version: The `PullRequestVersion` to which
622 the comments shall be linked.
622 the comments shall be linked.
623
623
624 """
624 """
625 pull_request = pull_request_version.pull_request
625 pull_request = pull_request_version.pull_request
626 comments = ChangesetComment.query().filter(
626 comments = ChangesetComment.query().filter(
627 # TODO: johbo: Should we query for the repo at all here?
627 # TODO: johbo: Should we query for the repo at all here?
628 # Pending decision on how comments of PRs are to be related
628 # Pending decision on how comments of PRs are to be related
629 # to either the source repo, the target repo or no repo at all.
629 # to either the source repo, the target repo or no repo at all.
630 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
630 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
631 ChangesetComment.pull_request == pull_request,
631 ChangesetComment.pull_request == pull_request,
632 ChangesetComment.pull_request_version == None)
632 ChangesetComment.pull_request_version == None)
633
633
634 # TODO: johbo: Find out why this breaks if it is done in a bulk
634 # TODO: johbo: Find out why this breaks if it is done in a bulk
635 # operation.
635 # operation.
636 for comment in comments:
636 for comment in comments:
637 comment.pull_request_version_id = (
637 comment.pull_request_version_id = (
638 pull_request_version.pull_request_version_id)
638 pull_request_version.pull_request_version_id)
639 Session().add(comment)
639 Session().add(comment)
640
640
641 def _calculate_commit_id_changes(self, old_ids, new_ids):
641 def _calculate_commit_id_changes(self, old_ids, new_ids):
642 added = new_ids.difference(old_ids)
642 added = new_ids.difference(old_ids)
643 common = old_ids.intersection(new_ids)
643 common = old_ids.intersection(new_ids)
644 removed = old_ids.difference(new_ids)
644 removed = old_ids.difference(new_ids)
645 return ChangeTuple(added, common, removed)
645 return ChangeTuple(added, common, removed)
646
646
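The added/common/removed split computed by _calculate_commit_id_changes is plain set arithmetic; a self-contained sketch with a local ChangeTuple (the application's models are not imported here):

from collections import namedtuple

ChangeTuple = namedtuple('ChangeTuple', ['added', 'common', 'removed'])

def calculate_commit_id_changes(old_ids, new_ids):
    # commits only in the new set were added, shared ones are common,
    # commits only in the old set were dropped by the update
    return ChangeTuple(
        added=new_ids.difference(old_ids),
        common=old_ids.intersection(new_ids),
        removed=old_ids.difference(new_ids))

changes = calculate_commit_id_changes({'a1', 'b2'}, {'b2', 'c3'})
assert changes.added == {'c3'} and changes.removed == {'a1'}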
647 def _calculate_file_changes(self, old_diff_data, new_diff_data):
647 def _calculate_file_changes(self, old_diff_data, new_diff_data):
648
648
649 old_files = OrderedDict()
649 old_files = OrderedDict()
650 for diff_data in old_diff_data.parsed_diff:
650 for diff_data in old_diff_data.parsed_diff:
651 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
651 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
652
652
653 added_files = []
653 added_files = []
654 modified_files = []
654 modified_files = []
655 removed_files = []
655 removed_files = []
656 for diff_data in new_diff_data.parsed_diff:
656 for diff_data in new_diff_data.parsed_diff:
657 new_filename = diff_data['filename']
657 new_filename = diff_data['filename']
658 new_hash = md5_safe(diff_data['raw_diff'])
658 new_hash = md5_safe(diff_data['raw_diff'])
659
659
660 old_hash = old_files.get(new_filename)
660 old_hash = old_files.get(new_filename)
661 if not old_hash:
661 if not old_hash:
662 # file is not present in the old diff, which means it was added
662 # file is not present in the old diff, which means it was added
663 added_files.append(new_filename)
663 added_files.append(new_filename)
664 else:
664 else:
665 if new_hash != old_hash:
665 if new_hash != old_hash:
666 modified_files.append(new_filename)
666 modified_files.append(new_filename)
667 # now remove a file from old, since we have seen it already
667 # now remove a file from old, since we have seen it already
668 del old_files[new_filename]
668 del old_files[new_filename]
669
669
670 # removed files are those present in the old diff but not in the new one;
670 # removed files are those present in the old diff but not in the new one;
671 # since files seen in the new diff were already deleted from the old set,
671 # since files seen in the new diff were already deleted from the old set,
672 # any left-overs are the removed files
672 # any left-overs are the removed files
673 removed_files.extend(old_files.keys())
673 removed_files.extend(old_files.keys())
674
674
675 return FileChangeTuple(added_files, modified_files, removed_files)
675 return FileChangeTuple(added_files, modified_files, removed_files)
676
676
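A simplified illustration of the file-change classification above, assuming each diff has already been reduced to a mapping of filename to a hash of its raw diff (DiffProcessor and md5_safe are replaced by hashlib here):

import hashlib
from collections import namedtuple

FileChangeTuple = namedtuple('FileChangeTuple', ['added', 'modified', 'removed'])

def classify_file_changes(old_files, new_files):
    # old_files / new_files: {filename: hexdigest of that file's raw diff}
    added, modified = [], []
    remaining = dict(old_files)
    for name, new_hash in new_files.items():
        old_hash = remaining.pop(name, None)
        if old_hash is None:
            added.append(name)        # not in the old diff -> added
        elif old_hash != new_hash:
            modified.append(name)     # per-file diff changed -> modified
    # whatever is still left from the old diff was removed
    return FileChangeTuple(added, modified, list(remaining))

old = {'setup.py': hashlib.md5(b'diff-a').hexdigest()}
new = {'setup.py': hashlib.md5(b'diff-b').hexdigest(),
       'README.rst': hashlib.md5(b'diff-c').hexdigest()}
print(classify_file_changes(old, new))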
677 def _render_update_message(self, changes, file_changes):
677 def _render_update_message(self, changes, file_changes):
678 """
678 """
679 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
679 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
680 so it always looks the same regardless of which default
680 so it always looks the same regardless of which default
681 renderer the system is using.
681 renderer the system is using.
682
682
683 :param changes: changes named tuple
683 :param changes: changes named tuple
684 :param file_changes: file changes named tuple
684 :param file_changes: file changes named tuple
685
685
686 """
686 """
687 new_status = ChangesetStatus.get_status_lbl(
687 new_status = ChangesetStatus.get_status_lbl(
688 ChangesetStatus.STATUS_UNDER_REVIEW)
688 ChangesetStatus.STATUS_UNDER_REVIEW)
689
689
690 changed_files = (
690 changed_files = (
691 file_changes.added + file_changes.modified + file_changes.removed)
691 file_changes.added + file_changes.modified + file_changes.removed)
692
692
693 params = {
693 params = {
694 'under_review_label': new_status,
694 'under_review_label': new_status,
695 'added_commits': changes.added,
695 'added_commits': changes.added,
696 'removed_commits': changes.removed,
696 'removed_commits': changes.removed,
697 'changed_files': changed_files,
697 'changed_files': changed_files,
698 'added_files': file_changes.added,
698 'added_files': file_changes.added,
699 'modified_files': file_changes.modified,
699 'modified_files': file_changes.modified,
700 'removed_files': file_changes.removed,
700 'removed_files': file_changes.removed,
701 }
701 }
702 renderer = RstTemplateRenderer()
702 renderer = RstTemplateRenderer()
703 return renderer.render('pull_request_update.mako', **params)
703 return renderer.render('pull_request_update.mako', **params)
704
704
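The automatic update comment is produced by filling a template with the params dict built above; a rough stand-in that uses string.Template instead of RstTemplateRenderer and the pull_request_update.mako template:

from string import Template

update_template = Template(
    'Pull request updated, new commits are set to "$under_review_label".\n'
    'Added commits: $added_commits\n'
    'Removed commits: $removed_commits\n'
    'Changed files: $changed_files\n')

print(update_template.substitute(
    under_review_label='Under Review',
    added_commits=['c3d4'],
    removed_commits=[],
    changed_files=['README.rst']))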
705 def edit(self, pull_request, title, description):
705 def edit(self, pull_request, title, description):
706 pull_request = self.__get_pull_request(pull_request)
706 pull_request = self.__get_pull_request(pull_request)
707 if pull_request.is_closed():
707 if pull_request.is_closed():
708 raise ValueError('This pull request is closed')
708 raise ValueError('This pull request is closed')
709 if title:
709 if title:
710 pull_request.title = title
710 pull_request.title = title
711 pull_request.description = description
711 pull_request.description = description
712 pull_request.updated_on = datetime.datetime.now()
712 pull_request.updated_on = datetime.datetime.now()
713 Session().add(pull_request)
713 Session().add(pull_request)
714
714
715 def update_reviewers(self, pull_request, reviewers_ids):
715 def update_reviewers(self, pull_request, reviewers_ids):
716 reviewers_ids = set(reviewers_ids)
716 reviewers_ids = set(reviewers_ids)
717 pull_request = self.__get_pull_request(pull_request)
717 pull_request = self.__get_pull_request(pull_request)
718 current_reviewers = PullRequestReviewers.query()\
718 current_reviewers = PullRequestReviewers.query()\
719 .filter(PullRequestReviewers.pull_request ==
719 .filter(PullRequestReviewers.pull_request ==
720 pull_request).all()
720 pull_request).all()
721 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
721 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
722
722
723 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
723 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
724 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
724 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
725
725
726 log.debug("Adding %s reviewers", ids_to_add)
726 log.debug("Adding %s reviewers", ids_to_add)
727 log.debug("Removing %s reviewers", ids_to_remove)
727 log.debug("Removing %s reviewers", ids_to_remove)
728 changed = False
728 changed = False
729 for uid in ids_to_add:
729 for uid in ids_to_add:
730 changed = True
730 changed = True
731 _usr = self._get_user(uid)
731 _usr = self._get_user(uid)
732 reviewer = PullRequestReviewers(_usr, pull_request)
732 reviewer = PullRequestReviewers(_usr, pull_request)
733 Session().add(reviewer)
733 Session().add(reviewer)
734
734
735 self.notify_reviewers(pull_request, ids_to_add)
735 self.notify_reviewers(pull_request, ids_to_add)
736
736
737 for uid in ids_to_remove:
737 for uid in ids_to_remove:
738 changed = True
738 changed = True
739 reviewer = PullRequestReviewers.query()\
739 reviewer = PullRequestReviewers.query()\
740 .filter(PullRequestReviewers.user_id == uid,
740 .filter(PullRequestReviewers.user_id == uid,
741 PullRequestReviewers.pull_request == pull_request)\
741 PullRequestReviewers.pull_request == pull_request)\
742 .scalar()
742 .scalar()
743 if reviewer:
743 if reviewer:
744 Session().delete(reviewer)
744 Session().delete(reviewer)
745 if changed:
745 if changed:
746 pull_request.updated_on = datetime.datetime.now()
746 pull_request.updated_on = datetime.datetime.now()
747 Session().add(pull_request)
747 Session().add(pull_request)
748
748
749 return ids_to_add, ids_to_remove
749 return ids_to_add, ids_to_remove
750
750
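The reviewer update above reduces to two set differences; a minimal sketch with plain user ids and no database involved:

def reconcile_reviewers(current_ids, wanted_ids):
    # reviewers to add are wanted but not yet present,
    # reviewers to remove are present but no longer wanted
    current_ids, wanted_ids = set(current_ids), set(wanted_ids)
    return wanted_ids - current_ids, current_ids - wanted_ids

ids_to_add, ids_to_remove = reconcile_reviewers([1, 2, 3], [2, 3, 4])
assert ids_to_add == {4} and ids_to_remove == {1}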
751 def get_url(self, pull_request):
752 return url('pullrequest_show', repo_name=self.target_repo.repo_name,
753 pull_request_id=self.pull_request_id,
754 qualified=True)
755
751 def notify_reviewers(self, pull_request, reviewers_ids):
756 def notify_reviewers(self, pull_request, reviewers_ids):
752 # notification to reviewers
757 # notification to reviewers
753 if not reviewers_ids:
758 if not reviewers_ids:
754 return
759 return
755
760
756 pull_request_obj = pull_request
761 pull_request_obj = pull_request
757 # get the current participants of this pull request
762 # get the current participants of this pull request
758 recipients = reviewers_ids
763 recipients = reviewers_ids
759 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
764 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
760
765
761 pr_source_repo = pull_request_obj.source_repo
766 pr_source_repo = pull_request_obj.source_repo
762 pr_target_repo = pull_request_obj.target_repo
767 pr_target_repo = pull_request_obj.target_repo
763
768
764 pr_url = h.url(
769 pr_url = h.url(
765 'pullrequest_show',
770 'pullrequest_show',
766 repo_name=pr_target_repo.repo_name,
771 repo_name=pr_target_repo.repo_name,
767 pull_request_id=pull_request_obj.pull_request_id,
772 pull_request_id=pull_request_obj.pull_request_id,
768 qualified=True,)
773 qualified=True,)
769
774
770 # set some variables for email notification
775 # set some variables for email notification
771 pr_target_repo_url = h.url(
776 pr_target_repo_url = h.url(
772 'summary_home',
777 'summary_home',
773 repo_name=pr_target_repo.repo_name,
778 repo_name=pr_target_repo.repo_name,
774 qualified=True)
779 qualified=True)
775
780
776 pr_source_repo_url = h.url(
781 pr_source_repo_url = h.url(
777 'summary_home',
782 'summary_home',
778 repo_name=pr_source_repo.repo_name,
783 repo_name=pr_source_repo.repo_name,
779 qualified=True)
784 qualified=True)
780
785
781 # pull request specifics
786 # pull request specifics
782 pull_request_commits = [
787 pull_request_commits = [
783 (x.raw_id, x.message)
788 (x.raw_id, x.message)
784 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
789 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
785
790
786 kwargs = {
791 kwargs = {
787 'user': pull_request.author,
792 'user': pull_request.author,
788 'pull_request': pull_request_obj,
793 'pull_request': pull_request_obj,
789 'pull_request_commits': pull_request_commits,
794 'pull_request_commits': pull_request_commits,
790
795
791 'pull_request_target_repo': pr_target_repo,
796 'pull_request_target_repo': pr_target_repo,
792 'pull_request_target_repo_url': pr_target_repo_url,
797 'pull_request_target_repo_url': pr_target_repo_url,
793
798
794 'pull_request_source_repo': pr_source_repo,
799 'pull_request_source_repo': pr_source_repo,
795 'pull_request_source_repo_url': pr_source_repo_url,
800 'pull_request_source_repo_url': pr_source_repo_url,
796
801
797 'pull_request_url': pr_url,
802 'pull_request_url': pr_url,
798 }
803 }
799
804
800 # pre-generate the subject for notification itself
805 # pre-generate the subject for notification itself
801 (subject,
806 (subject,
802 _h, _e, # we don't care about those
807 _h, _e, # we don't care about those
803 body_plaintext) = EmailNotificationModel().render_email(
808 body_plaintext) = EmailNotificationModel().render_email(
804 notification_type, **kwargs)
809 notification_type, **kwargs)
805
810
806 # create notification objects, and emails
811 # create notification objects, and emails
807 NotificationModel().create(
812 NotificationModel().create(
808 created_by=pull_request.author,
813 created_by=pull_request.author,
809 notification_subject=subject,
814 notification_subject=subject,
810 notification_body=body_plaintext,
815 notification_body=body_plaintext,
811 notification_type=notification_type,
816 notification_type=notification_type,
812 recipients=recipients,
817 recipients=recipients,
813 email_kwargs=kwargs,
818 email_kwargs=kwargs,
814 )
819 )
815
820
816 def delete(self, pull_request):
821 def delete(self, pull_request):
817 pull_request = self.__get_pull_request(pull_request)
822 pull_request = self.__get_pull_request(pull_request)
818 self._cleanup_merge_workspace(pull_request)
823 self._cleanup_merge_workspace(pull_request)
819 Session().delete(pull_request)
824 Session().delete(pull_request)
820
825
821 def close_pull_request(self, pull_request, user):
826 def close_pull_request(self, pull_request, user):
822 pull_request = self.__get_pull_request(pull_request)
827 pull_request = self.__get_pull_request(pull_request)
823 self._cleanup_merge_workspace(pull_request)
828 self._cleanup_merge_workspace(pull_request)
824 pull_request.status = PullRequest.STATUS_CLOSED
829 pull_request.status = PullRequest.STATUS_CLOSED
825 pull_request.updated_on = datetime.datetime.now()
830 pull_request.updated_on = datetime.datetime.now()
826 Session().add(pull_request)
831 Session().add(pull_request)
827 self._trigger_pull_request_hook(
832 self._trigger_pull_request_hook(
828 pull_request, pull_request.author, 'close')
833 pull_request, pull_request.author, 'close')
829 self._log_action('user_closed_pull_request', user, pull_request)
834 self._log_action('user_closed_pull_request', user, pull_request)
830
835
831 def close_pull_request_with_comment(self, pull_request, user, repo,
836 def close_pull_request_with_comment(self, pull_request, user, repo,
832 message=None):
837 message=None):
833 status = ChangesetStatus.STATUS_REJECTED
838 status = ChangesetStatus.STATUS_REJECTED
834
839
835 if not message:
840 if not message:
836 message = (
841 message = (
837 _('Status change %(transition_icon)s %(status)s') % {
842 _('Status change %(transition_icon)s %(status)s') % {
838 'transition_icon': '>',
843 'transition_icon': '>',
839 'status': ChangesetStatus.get_status_lbl(status)})
844 'status': ChangesetStatus.get_status_lbl(status)})
840
845
841 internal_message = _('Closing with') + ' ' + message
846 internal_message = _('Closing with') + ' ' + message
842
847
843 comm = ChangesetCommentsModel().create(
848 comm = ChangesetCommentsModel().create(
844 text=internal_message,
849 text=internal_message,
845 repo=repo.repo_id,
850 repo=repo.repo_id,
846 user=user.user_id,
851 user=user.user_id,
847 pull_request=pull_request.pull_request_id,
852 pull_request=pull_request.pull_request_id,
848 f_path=None,
853 f_path=None,
849 line_no=None,
854 line_no=None,
850 status_change=ChangesetStatus.get_status_lbl(status),
855 status_change=ChangesetStatus.get_status_lbl(status),
851 closing_pr=True
856 closing_pr=True
852 )
857 )
853
858
854 ChangesetStatusModel().set_status(
859 ChangesetStatusModel().set_status(
855 repo.repo_id,
860 repo.repo_id,
856 status,
861 status,
857 user.user_id,
862 user.user_id,
858 comm,
863 comm,
859 pull_request=pull_request.pull_request_id
864 pull_request=pull_request.pull_request_id
860 )
865 )
861 Session().flush()
866 Session().flush()
862
867
863 PullRequestModel().close_pull_request(
868 PullRequestModel().close_pull_request(
864 pull_request.pull_request_id, user)
869 pull_request.pull_request_id, user)
865
870
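When no explicit message is passed, close_pull_request_with_comment builds a status-change message from the status label; a sketch of that formatting without the gettext wrapper:

def default_close_message(status_label, transition_icon='>'):
    # mirrors the default 'Status change > Rejected' style message
    return 'Status change %(transition_icon)s %(status)s' % {
        'transition_icon': transition_icon,
        'status': status_label}

print(default_close_message('Rejected'))  # Status change > Rejected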
866 def merge_status(self, pull_request):
871 def merge_status(self, pull_request):
867 if not self._is_merge_enabled(pull_request):
872 if not self._is_merge_enabled(pull_request):
868 return False, _('Server-side pull request merging is disabled.')
873 return False, _('Server-side pull request merging is disabled.')
869 if pull_request.is_closed():
874 if pull_request.is_closed():
870 return False, _('This pull request is closed.')
875 return False, _('This pull request is closed.')
871 merge_possible, msg = self._check_repo_requirements(
876 merge_possible, msg = self._check_repo_requirements(
872 target=pull_request.target_repo, source=pull_request.source_repo)
877 target=pull_request.target_repo, source=pull_request.source_repo)
873 if not merge_possible:
878 if not merge_possible:
874 return merge_possible, msg
879 return merge_possible, msg
875
880
876 try:
881 try:
877 resp = self._try_merge(pull_request)
882 resp = self._try_merge(pull_request)
878 status = resp.possible, self.merge_status_message(
883 status = resp.possible, self.merge_status_message(
879 resp.failure_reason)
884 resp.failure_reason)
880 except NotImplementedError:
885 except NotImplementedError:
881 status = False, _('Pull request merging is not supported.')
886 status = False, _('Pull request merging is not supported.')
882
887
883 return status
888 return status
884
889
885 def _check_repo_requirements(self, target, source):
890 def _check_repo_requirements(self, target, source):
886 """
891 """
887 Check if `target` and `source` have compatible requirements.
892 Check if `target` and `source` have compatible requirements.
888
893
889 Currently this is just checking for largefiles.
894 Currently this is just checking for largefiles.
890 """
895 """
891 target_has_largefiles = self._has_largefiles(target)
896 target_has_largefiles = self._has_largefiles(target)
892 source_has_largefiles = self._has_largefiles(source)
897 source_has_largefiles = self._has_largefiles(source)
893 merge_possible = True
898 merge_possible = True
894 message = u''
899 message = u''
895
900
896 if target_has_largefiles != source_has_largefiles:
901 if target_has_largefiles != source_has_largefiles:
897 merge_possible = False
902 merge_possible = False
898 if source_has_largefiles:
903 if source_has_largefiles:
899 message = _(
904 message = _(
900 'Target repository large files support is disabled.')
905 'Target repository large files support is disabled.')
901 else:
906 else:
902 message = _(
907 message = _(
903 'Source repository large files support is disabled.')
908 'Source repository large files support is disabled.')
904
909
905 return merge_possible, message
910 return merge_possible, message
906
911
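The repository requirements check boils down to comparing the largefiles flags of both repos; a pure-function sketch (the message strings are the untranslated equivalents of the ones above):

def check_largefiles_compatible(target_has_largefiles, source_has_largefiles):
    # merging is only possible when both repositories agree on largefiles support
    if target_has_largefiles == source_has_largefiles:
        return True, u''
    if source_has_largefiles:
        return False, u'Target repository large files support is disabled.'
    return False, u'Source repository large files support is disabled.'

assert check_largefiles_compatible(True, True) == (True, u'')
assert check_largefiles_compatible(False, True)[0] is False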
907 def _has_largefiles(self, repo):
912 def _has_largefiles(self, repo):
908 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
913 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
909 'extensions', 'largefiles')
914 'extensions', 'largefiles')
910 return largefiles_ui and largefiles_ui[0].active
915 return largefiles_ui and largefiles_ui[0].active
911
916
912 def _try_merge(self, pull_request):
917 def _try_merge(self, pull_request):
913 """
918 """
914 Try to merge the pull request and return the merge status.
919 Try to merge the pull request and return the merge status.
915 """
920 """
916 log.debug(
921 log.debug(
917 "Trying out if the pull request %s can be merged.",
922 "Trying out if the pull request %s can be merged.",
918 pull_request.pull_request_id)
923 pull_request.pull_request_id)
919 target_vcs = pull_request.target_repo.scm_instance()
924 target_vcs = pull_request.target_repo.scm_instance()
920 target_ref = self._refresh_reference(
925 target_ref = self._refresh_reference(
921 pull_request.target_ref_parts, target_vcs)
926 pull_request.target_ref_parts, target_vcs)
922
927
923 target_locked = pull_request.target_repo.locked
928 target_locked = pull_request.target_repo.locked
924 if target_locked and target_locked[0]:
929 if target_locked and target_locked[0]:
925 log.debug("The target repository is locked.")
930 log.debug("The target repository is locked.")
926 merge_state = MergeResponse(
931 merge_state = MergeResponse(
927 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
932 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
928 elif self._needs_merge_state_refresh(pull_request, target_ref):
933 elif self._needs_merge_state_refresh(pull_request, target_ref):
929 log.debug("Refreshing the merge status of the repository.")
934 log.debug("Refreshing the merge status of the repository.")
930 merge_state = self._refresh_merge_state(
935 merge_state = self._refresh_merge_state(
931 pull_request, target_vcs, target_ref)
936 pull_request, target_vcs, target_ref)
932 else:
937 else:
933 possible = pull_request.\
938 possible = pull_request.\
934 _last_merge_status == MergeFailureReason.NONE
939 _last_merge_status == MergeFailureReason.NONE
935 merge_state = MergeResponse(
940 merge_state = MergeResponse(
936 possible, False, None, pull_request._last_merge_status)
941 possible, False, None, pull_request._last_merge_status)
937 log.debug("Merge response: %s", merge_state)
942 log.debug("Merge response: %s", merge_state)
938 return merge_state
943 return merge_state
939
944
940 def _refresh_reference(self, reference, vcs_repository):
945 def _refresh_reference(self, reference, vcs_repository):
941 if reference.type in ('branch', 'book'):
946 if reference.type in ('branch', 'book'):
942 name_or_id = reference.name
947 name_or_id = reference.name
943 else:
948 else:
944 name_or_id = reference.commit_id
949 name_or_id = reference.commit_id
945 refreshed_commit = vcs_repository.get_commit(name_or_id)
950 refreshed_commit = vcs_repository.get_commit(name_or_id)
946 refreshed_reference = Reference(
951 refreshed_reference = Reference(
947 reference.type, reference.name, refreshed_commit.raw_id)
952 reference.type, reference.name, refreshed_commit.raw_id)
948 return refreshed_reference
953 return refreshed_reference
949
954
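A compact illustration of the reference refresh: branches and bookmarks are re-resolved by name, while other ref types stay pinned to their recorded commit id. The FakeVcsRepo stub is hypothetical and only models get_commit():

from collections import namedtuple

Reference = namedtuple('Reference', ['type', 'name', 'commit_id'])
Commit = namedtuple('Commit', ['raw_id'])

class FakeVcsRepo(object):
    def __init__(self, refs):
        self.refs = refs  # e.g. {'default': 'deadbeef'}

    def get_commit(self, name_or_id):
        return Commit(self.refs.get(name_or_id, name_or_id))

def refresh_reference(reference, vcs_repository):
    lookup = reference.name if reference.type in ('branch', 'book') else reference.commit_id
    return Reference(reference.type, reference.name,
                     vcs_repository.get_commit(lookup).raw_id)

repo = FakeVcsRepo({'default': 'deadbeef'})
print(refresh_reference(Reference('branch', 'default', 'cafe0000'), repo).commit_id)  # deadbeef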
950 def _needs_merge_state_refresh(self, pull_request, target_reference):
955 def _needs_merge_state_refresh(self, pull_request, target_reference):
951 return not(
956 return not(
952 pull_request.revisions and
957 pull_request.revisions and
953 pull_request.revisions[0] == pull_request._last_merge_source_rev and
958 pull_request.revisions[0] == pull_request._last_merge_source_rev and
954 target_reference.commit_id == pull_request._last_merge_target_rev)
959 target_reference.commit_id == pull_request._last_merge_target_rev)
955
960
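The staleness test for the cached merge state can be expressed as a pure function over the recorded revisions; a sketch with plain values instead of the pull request object:

def needs_merge_state_refresh(revisions, last_source_rev, last_target_rev, target_commit_id):
    # the cached dry-run result is still valid only if the newest source revision
    # and the target commit both match what was recorded last time
    return not (
        revisions and
        revisions[0] == last_source_rev and
        target_commit_id == last_target_rev)

assert needs_merge_state_refresh(['abc'], 'abc', 'def', 'def') is False
assert needs_merge_state_refresh(['abc'], 'abc', 'def', 'fff') is True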
956 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
961 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
957 workspace_id = self._workspace_id(pull_request)
962 workspace_id = self._workspace_id(pull_request)
958 source_vcs = pull_request.source_repo.scm_instance()
963 source_vcs = pull_request.source_repo.scm_instance()
959 use_rebase = self._use_rebase_for_merging(pull_request)
964 use_rebase = self._use_rebase_for_merging(pull_request)
960 merge_state = target_vcs.merge(
965 merge_state = target_vcs.merge(
961 target_reference, source_vcs, pull_request.source_ref_parts,
966 target_reference, source_vcs, pull_request.source_ref_parts,
962 workspace_id, dry_run=True, use_rebase=use_rebase)
967 workspace_id, dry_run=True, use_rebase=use_rebase)
963
968
964 # Do not store the response if there was an unknown error.
969 # Do not store the response if there was an unknown error.
965 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
970 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
966 pull_request._last_merge_source_rev = pull_request.\
971 pull_request._last_merge_source_rev = pull_request.\
967 source_ref_parts.commit_id
972 source_ref_parts.commit_id
968 pull_request._last_merge_target_rev = target_reference.commit_id
973 pull_request._last_merge_target_rev = target_reference.commit_id
969 pull_request._last_merge_status = (
974 pull_request._last_merge_status = (
970 merge_state.failure_reason)
975 merge_state.failure_reason)
971 Session().add(pull_request)
976 Session().add(pull_request)
972 Session().flush()
977 Session().flush()
973
978
974 return merge_state
979 return merge_state
975
980
976 def _workspace_id(self, pull_request):
981 def _workspace_id(self, pull_request):
977 workspace_id = 'pr-%s' % pull_request.pull_request_id
982 workspace_id = 'pr-%s' % pull_request.pull_request_id
978 return workspace_id
983 return workspace_id
979
984
980 def merge_status_message(self, status_code):
985 def merge_status_message(self, status_code):
981 """
986 """
982 Return a human-friendly error message for the given merge status code.
987 Return a human-friendly error message for the given merge status code.
983 """
988 """
984 return self.MERGE_STATUS_MESSAGES[status_code]
989 return self.MERGE_STATUS_MESSAGES[status_code]
985
990
986 def generate_repo_data(self, repo, commit_id=None, branch=None,
991 def generate_repo_data(self, repo, commit_id=None, branch=None,
987 bookmark=None):
992 bookmark=None):
988 all_refs, selected_ref = \
993 all_refs, selected_ref = \
989 self._get_repo_pullrequest_sources(
994 self._get_repo_pullrequest_sources(
990 repo.scm_instance(), commit_id=commit_id,
995 repo.scm_instance(), commit_id=commit_id,
991 branch=branch, bookmark=bookmark)
996 branch=branch, bookmark=bookmark)
992
997
993 refs_select2 = []
998 refs_select2 = []
994 for element in all_refs:
999 for element in all_refs:
995 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1000 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
996 refs_select2.append({'text': element[1], 'children': children})
1001 refs_select2.append({'text': element[1], 'children': children})
997
1002
998 return {
1003 return {
999 'user': {
1004 'user': {
1000 'user_id': repo.user.user_id,
1005 'user_id': repo.user.user_id,
1001 'username': repo.user.username,
1006 'username': repo.user.username,
1002 'firstname': repo.user.firstname,
1007 'firstname': repo.user.firstname,
1003 'lastname': repo.user.lastname,
1008 'lastname': repo.user.lastname,
1004 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1009 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1005 },
1010 },
1006 'description': h.chop_at_smart(repo.description, '\n'),
1011 'description': h.chop_at_smart(repo.description, '\n'),
1007 'refs': {
1012 'refs': {
1008 'all_refs': all_refs,
1013 'all_refs': all_refs,
1009 'selected_ref': selected_ref,
1014 'selected_ref': selected_ref,
1010 'select2_refs': refs_select2
1015 'select2_refs': refs_select2
1011 }
1016 }
1012 }
1017 }
1013
1018
1014 def generate_pullrequest_title(self, source, source_ref, target):
1019 def generate_pullrequest_title(self, source, source_ref, target):
1015 return '{source}#{at_ref} to {target}'.format(
1020 return '{source}#{at_ref} to {target}'.format(
1016 source=source,
1021 source=source,
1017 at_ref=source_ref,
1022 at_ref=source_ref,
1018 target=target,
1023 target=target,
1019 )
1024 )
1020
1025
1021 def _cleanup_merge_workspace(self, pull_request):
1026 def _cleanup_merge_workspace(self, pull_request):
1022 # Merging related cleanup
1027 # Merging related cleanup
1023 target_scm = pull_request.target_repo.scm_instance()
1028 target_scm = pull_request.target_repo.scm_instance()
1024 workspace_id = 'pr-%s' % pull_request.pull_request_id
1029 workspace_id = 'pr-%s' % pull_request.pull_request_id
1025
1030
1026 try:
1031 try:
1027 target_scm.cleanup_merge_workspace(workspace_id)
1032 target_scm.cleanup_merge_workspace(workspace_id)
1028 except NotImplementedError:
1033 except NotImplementedError:
1029 pass
1034 pass
1030
1035
1031 def _get_repo_pullrequest_sources(
1036 def _get_repo_pullrequest_sources(
1032 self, repo, commit_id=None, branch=None, bookmark=None):
1037 self, repo, commit_id=None, branch=None, bookmark=None):
1033 """
1038 """
1034 Return a structure with the repo's interesting commits, suitable for
1039 Return a structure with the repo's interesting commits, suitable for
1035 the selectors in the pull request controller
1040 the selectors in the pull request controller
1036
1041
1037 :param commit_id: a commit that must be in the list somehow
1042 :param commit_id: a commit that must be in the list somehow
1038 and selected by default
1043 and selected by default
1039 :param branch: a branch that must be in the list and selected
1044 :param branch: a branch that must be in the list and selected
1040 by default - even if closed
1045 by default - even if closed
1041 :param bookmark: a bookmark that must be in the list and selected
1046 :param bookmark: a bookmark that must be in the list and selected
1042 """
1047 """
1043
1048
1044 commit_id = safe_str(commit_id) if commit_id else None
1049 commit_id = safe_str(commit_id) if commit_id else None
1045 branch = safe_str(branch) if branch else None
1050 branch = safe_str(branch) if branch else None
1046 bookmark = safe_str(bookmark) if bookmark else None
1051 bookmark = safe_str(bookmark) if bookmark else None
1047
1052
1048 selected = None
1053 selected = None
1049
1054
1050 # order matters: first source that has commit_id in it will be selected
1055 # order matters: first source that has commit_id in it will be selected
1051 sources = []
1056 sources = []
1052 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1057 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1053 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1058 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1054
1059
1055 if commit_id:
1060 if commit_id:
1056 ref_commit = (h.short_id(commit_id), commit_id)
1061 ref_commit = (h.short_id(commit_id), commit_id)
1057 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1062 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1058
1063
1059 sources.append(
1064 sources.append(
1060 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1065 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1061 )
1066 )
1062
1067
1063 groups = []
1068 groups = []
1064 for group_key, ref_list, group_name, match in sources:
1069 for group_key, ref_list, group_name, match in sources:
1065 group_refs = []
1070 group_refs = []
1066 for ref_name, ref_id in ref_list:
1071 for ref_name, ref_id in ref_list:
1067 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1072 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1068 group_refs.append((ref_key, ref_name))
1073 group_refs.append((ref_key, ref_name))
1069
1074
1070 if not selected:
1075 if not selected:
1071 if set([commit_id, match]) & set([ref_id, ref_name]):
1076 if set([commit_id, match]) & set([ref_id, ref_name]):
1072 selected = ref_key
1077 selected = ref_key
1073
1078
1074 if group_refs:
1079 if group_refs:
1075 groups.append((group_refs, group_name))
1080 groups.append((group_refs, group_name))
1076
1081
1077 if not selected:
1082 if not selected:
1078 ref = commit_id or branch or bookmark
1083 ref = commit_id or branch or bookmark
1079 if ref:
1084 if ref:
1080 raise CommitDoesNotExistError(
1085 raise CommitDoesNotExistError(
1081 'No commit refs could be found matching: %s' % ref)
1086 'No commit refs could be found matching: %s' % ref)
1082 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1087 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1083 selected = 'branch:%s:%s' % (
1088 selected = 'branch:%s:%s' % (
1084 repo.DEFAULT_BRANCH_NAME,
1089 repo.DEFAULT_BRANCH_NAME,
1085 repo.branches[repo.DEFAULT_BRANCH_NAME]
1090 repo.branches[repo.DEFAULT_BRANCH_NAME]
1086 )
1091 )
1087 elif repo.commit_ids:
1092 elif repo.commit_ids:
1088 rev = repo.commit_ids[0]
1093 rev = repo.commit_ids[0]
1089 selected = 'rev:%s:%s' % (rev, rev)
1094 selected = 'rev:%s:%s' % (rev, rev)
1090 else:
1095 else:
1091 raise EmptyRepositoryError()
1096 raise EmptyRepositoryError()
1092 return groups, selected
1097 return groups, selected
1093
1098
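The grouped ref structure built above feeds the pull request selectors; a trimmed sketch of the grouping and of the 'type:name:commit_id' keys, without the selection logic:

def build_ref_groups(sources):
    # sources: [(group_key, [(ref_name, ref_id), ...], group_label), ...]
    groups = []
    for group_key, ref_list, group_label in sources:
        group_refs = [
            ('%s:%s:%s' % (group_key, ref_name, ref_id), ref_name)
            for ref_name, ref_id in ref_list]
        if group_refs:
            groups.append((group_refs, group_label))
    return groups

print(build_ref_groups([
    ('book', [], 'Bookmarks'),
    ('branch', [('default', 'deadbeef')], 'Branches')]))
# -> [([('branch:default:deadbeef', 'default')], 'Branches')]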
1094 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1099 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1095 pull_request = self.__get_pull_request(pull_request)
1100 pull_request = self.__get_pull_request(pull_request)
1096 return self._get_diff_from_pr_or_version(pull_request, context=context)
1101 return self._get_diff_from_pr_or_version(pull_request, context=context)
1097
1102
1098 def _get_diff_from_pr_or_version(self, pr_or_version, context):
1103 def _get_diff_from_pr_or_version(self, pr_or_version, context):
1099 source_repo = pr_or_version.source_repo
1104 source_repo = pr_or_version.source_repo
1100
1105
1101 # we swap org/other ref since we run a simple diff on one repo
1106 # we swap org/other ref since we run a simple diff on one repo
1102 target_ref_id = pr_or_version.target_ref_parts.commit_id
1107 target_ref_id = pr_or_version.target_ref_parts.commit_id
1103 source_ref_id = pr_or_version.source_ref_parts.commit_id
1108 source_ref_id = pr_or_version.source_ref_parts.commit_id
1104 target_commit = source_repo.get_commit(
1109 target_commit = source_repo.get_commit(
1105 commit_id=safe_str(target_ref_id))
1110 commit_id=safe_str(target_ref_id))
1106 source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
1111 source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
1107 vcs_repo = source_repo.scm_instance()
1112 vcs_repo = source_repo.scm_instance()
1108
1113
1109 # TODO: johbo: In the context of an update, we cannot reach
1114 # TODO: johbo: In the context of an update, we cannot reach
1110 # the old commit anymore with our normal mechanisms. It needs
1115 # the old commit anymore with our normal mechanisms. It needs
1111 # some sort of special support in the vcs layer to avoid this
1116 # some sort of special support in the vcs layer to avoid this
1112 # workaround.
1117 # workaround.
1113 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1118 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1114 vcs_repo.alias == 'git'):
1119 vcs_repo.alias == 'git'):
1115 source_commit.raw_id = safe_str(source_ref_id)
1120 source_commit.raw_id = safe_str(source_ref_id)
1116
1121
1117 log.debug('calculating diff between '
1122 log.debug('calculating diff between '
1118 'target_ref:%s and source_ref:%s for repo `%s`',
1123 'target_ref:%s and source_ref:%s for repo `%s`',
1119 target_ref_id, source_ref_id,
1124 target_ref_id, source_ref_id,
1120 safe_unicode(vcs_repo.path))
1125 safe_unicode(vcs_repo.path))
1121
1126
1122 vcs_diff = vcs_repo.get_diff(
1127 vcs_diff = vcs_repo.get_diff(
1123 commit1=target_commit, commit2=source_commit, context=context)
1128 commit1=target_commit, commit2=source_commit, context=context)
1124 return vcs_diff
1129 return vcs_diff
1125
1130
1126 def _is_merge_enabled(self, pull_request):
1131 def _is_merge_enabled(self, pull_request):
1127 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1132 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1128 settings = settings_model.get_general_settings()
1133 settings = settings_model.get_general_settings()
1129 return settings.get('rhodecode_pr_merge_enabled', False)
1134 return settings.get('rhodecode_pr_merge_enabled', False)
1130
1135
1131 def _use_rebase_for_merging(self, pull_request):
1136 def _use_rebase_for_merging(self, pull_request):
1132 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1137 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1133 settings = settings_model.get_general_settings()
1138 settings = settings_model.get_general_settings()
1134 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1139 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1135
1140
1136 def _log_action(self, action, user, pull_request):
1141 def _log_action(self, action, user, pull_request):
1137 action_logger(
1142 action_logger(
1138 user,
1143 user,
1139 '{action}:{pr_id}'.format(
1144 '{action}:{pr_id}'.format(
1140 action=action, pr_id=pull_request.pull_request_id),
1145 action=action, pr_id=pull_request.pull_request_id),
1141 pull_request.target_repo)
1146 pull_request.target_repo)
1142
1147
1143
1148
1144 ChangeTuple = namedtuple('ChangeTuple',
1149 ChangeTuple = namedtuple('ChangeTuple',
1145 ['added', 'common', 'removed'])
1150 ['added', 'common', 'removed'])
1146
1151
1147 FileChangeTuple = namedtuple('FileChangeTuple',
1152 FileChangeTuple = namedtuple('FileChangeTuple',
1148 ['added', 'modified', 'removed'])
1153 ['added', 'modified', 'removed'])
@@ -1,931 +1,934 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Repository model for rhodecode
22 Repository model for rhodecode
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28 import shutil
28 import shutil
29 import time
29 import time
30 import traceback
30 import traceback
31 from datetime import datetime
31 from datetime import datetime
32
32
33 from sqlalchemy.sql import func
33 from sqlalchemy.sql import func
34 from sqlalchemy.sql.expression import true, or_
34 from sqlalchemy.sql.expression import true, or_
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from rhodecode import events
37 from rhodecode import events
38 from rhodecode.lib import helpers as h
38 from rhodecode.lib import helpers as h
39 from rhodecode.lib.auth import HasUserGroupPermissionAny
39 from rhodecode.lib.auth import HasUserGroupPermissionAny
40 from rhodecode.lib.caching_query import FromCache
40 from rhodecode.lib.caching_query import FromCache
41 from rhodecode.lib.exceptions import AttachedForksError
41 from rhodecode.lib.exceptions import AttachedForksError
42 from rhodecode.lib.hooks_base import log_delete_repository
42 from rhodecode.lib.hooks_base import log_delete_repository
43 from rhodecode.lib.utils import make_db_config
43 from rhodecode.lib.utils import make_db_config
44 from rhodecode.lib.utils2 import (
44 from rhodecode.lib.utils2 import (
45 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
45 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
46 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
46 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
47 from rhodecode.lib.vcs.backends import get_backend
47 from rhodecode.lib.vcs.backends import get_backend
48 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
50 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
51 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
51 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
52 RepoGroup, RepositoryField)
52 RepoGroup, RepositoryField)
53 from rhodecode.model.scm import UserGroupList
53 from rhodecode.model.scm import UserGroupList
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55
55
56
56
57 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
58
58
59
59
60 class RepoModel(BaseModel):
60 class RepoModel(BaseModel):
61
61
62 cls = Repository
62 cls = Repository
63
63
64 def _get_user_group(self, users_group):
64 def _get_user_group(self, users_group):
65 return self._get_instance(UserGroup, users_group,
65 return self._get_instance(UserGroup, users_group,
66 callback=UserGroup.get_by_group_name)
66 callback=UserGroup.get_by_group_name)
67
67
68 def _get_repo_group(self, repo_group):
68 def _get_repo_group(self, repo_group):
69 return self._get_instance(RepoGroup, repo_group,
69 return self._get_instance(RepoGroup, repo_group,
70 callback=RepoGroup.get_by_group_name)
70 callback=RepoGroup.get_by_group_name)
71
71
72 def _create_default_perms(self, repository, private):
72 def _create_default_perms(self, repository, private):
73 # create default permission
73 # create default permission
74 default = 'repository.read'
74 default = 'repository.read'
75 def_user = User.get_default_user()
75 def_user = User.get_default_user()
76 for p in def_user.user_perms:
76 for p in def_user.user_perms:
77 if p.permission.permission_name.startswith('repository.'):
77 if p.permission.permission_name.startswith('repository.'):
78 default = p.permission.permission_name
78 default = p.permission.permission_name
79 break
79 break
80
80
81 default_perm = 'repository.none' if private else default
81 default_perm = 'repository.none' if private else default
82
82
83 repo_to_perm = UserRepoToPerm()
83 repo_to_perm = UserRepoToPerm()
84 repo_to_perm.permission = Permission.get_by_key(default_perm)
84 repo_to_perm.permission = Permission.get_by_key(default_perm)
85
85
86 repo_to_perm.repository = repository
86 repo_to_perm.repository = repository
87 repo_to_perm.user_id = def_user.user_id
87 repo_to_perm.user_id = def_user.user_id
88
88
89 return repo_to_perm
89 return repo_to_perm
90
90
91 @LazyProperty
91 @LazyProperty
92 def repos_path(self):
92 def repos_path(self):
93 """
93 """
94 Gets the repositories root path from database
94 Gets the repositories root path from database
95 """
95 """
96 settings_model = VcsSettingsModel(sa=self.sa)
96 settings_model = VcsSettingsModel(sa=self.sa)
97 return settings_model.get_repos_location()
97 return settings_model.get_repos_location()
98
98
99 def get(self, repo_id, cache=False):
99 def get(self, repo_id, cache=False):
100 repo = self.sa.query(Repository) \
100 repo = self.sa.query(Repository) \
101 .filter(Repository.repo_id == repo_id)
101 .filter(Repository.repo_id == repo_id)
102
102
103 if cache:
103 if cache:
104 repo = repo.options(FromCache("sql_cache_short",
104 repo = repo.options(FromCache("sql_cache_short",
105 "get_repo_%s" % repo_id))
105 "get_repo_%s" % repo_id))
106 return repo.scalar()
106 return repo.scalar()
107
107
108 def get_repo(self, repository):
108 def get_repo(self, repository):
109 return self._get_repo(repository)
109 return self._get_repo(repository)
110
110
111 def get_by_repo_name(self, repo_name, cache=False):
111 def get_by_repo_name(self, repo_name, cache=False):
112 repo = self.sa.query(Repository) \
112 repo = self.sa.query(Repository) \
113 .filter(Repository.repo_name == repo_name)
113 .filter(Repository.repo_name == repo_name)
114
114
115 if cache:
115 if cache:
116 repo = repo.options(FromCache("sql_cache_short",
116 repo = repo.options(FromCache("sql_cache_short",
117 "get_repo_%s" % repo_name))
117 "get_repo_%s" % repo_name))
118 return repo.scalar()
118 return repo.scalar()
119
119
120 def _extract_id_from_repo_name(self, repo_name):
120 def _extract_id_from_repo_name(self, repo_name):
121 if repo_name.startswith('/'):
121 if repo_name.startswith('/'):
122 repo_name = repo_name.lstrip('/')
122 repo_name = repo_name.lstrip('/')
123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
124 if by_id_match:
124 if by_id_match:
125 return by_id_match.groups()[0]
125 return by_id_match.groups()[0]
126
126
127 def get_repo_by_id(self, repo_name):
127 def get_repo_by_id(self, repo_name):
128 """
128 """
129 Extracts repo_name by id from special urls.
129 Extracts repo_name by id from special urls.
130 Example url is _11/repo_name
130 Example url is _11/repo_name
131
131
132 :param repo_name:
132 :param repo_name:
133 :return: repo object if matched else None
133 :return: repo object if matched else None
134 """
134 """
135 try:
135 try:
136 _repo_id = self._extract_id_from_repo_name(repo_name)
136 _repo_id = self._extract_id_from_repo_name(repo_name)
137 if _repo_id:
137 if _repo_id:
138 return self.get(_repo_id)
138 return self.get(_repo_id)
139 except Exception:
139 except Exception:
140 log.exception('Failed to extract repo_name from URL')
140 log.exception('Failed to extract repo_name from URL')
141
141
142 return None
142 return None
143
143
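The by-id lookup relies on the '_<id>/...' URL convention; a standalone version of the extraction regex used above:

import re

def extract_id_from_repo_name(repo_name):
    # URLs like '_11/some/repo' address a repository by its numeric id
    if repo_name.startswith('/'):
        repo_name = repo_name.lstrip('/')
    by_id_match = re.match(r'^_(\d{1,})', repo_name)
    if by_id_match:
        return by_id_match.groups()[0]

assert extract_id_from_repo_name('_11/repo_name') == '11'
assert extract_id_from_repo_name('repo_name') is None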
144 def get_url(self, repo):
145 return url('summary_home', repo_name=repo.repo_name, qualified=True)
146
144 def get_users(self, name_contains=None, limit=20, only_active=True):
147 def get_users(self, name_contains=None, limit=20, only_active=True):
145 # TODO: mikhail: move this method to the UserModel.
148 # TODO: mikhail: move this method to the UserModel.
146 query = self.sa.query(User)
149 query = self.sa.query(User)
147 if only_active:
150 if only_active:
148 query = query.filter(User.active == true())
151 query = query.filter(User.active == true())
149
152
150 if name_contains:
153 if name_contains:
151 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
154 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
152 query = query.filter(
155 query = query.filter(
153 or_(
156 or_(
154 User.name.ilike(ilike_expression),
157 User.name.ilike(ilike_expression),
155 User.lastname.ilike(ilike_expression),
158 User.lastname.ilike(ilike_expression),
156 User.username.ilike(ilike_expression)
159 User.username.ilike(ilike_expression)
157 )
160 )
158 )
161 )
159 query = query.limit(limit)
162 query = query.limit(limit)
160 users = query.all()
163 users = query.all()
161
164
162 _users = [
165 _users = [
163 {
166 {
164 'id': user.user_id,
167 'id': user.user_id,
165 'first_name': user.name,
168 'first_name': user.name,
166 'last_name': user.lastname,
169 'last_name': user.lastname,
167 'username': user.username,
170 'username': user.username,
168 'icon_link': h.gravatar_url(user.email, 14),
171 'icon_link': h.gravatar_url(user.email, 14),
169 'value_display': h.person(user.email),
172 'value_display': h.person(user.email),
170 'value': user.username,
173 'value': user.username,
171 'value_type': 'user',
174 'value_type': 'user',
172 'active': user.active,
175 'active': user.active,
173 }
176 }
174 for user in users
177 for user in users
175 ]
178 ]
176 return _users
179 return _users
177
180
178 def get_user_groups(self, name_contains=None, limit=20, only_active=True):
181 def get_user_groups(self, name_contains=None, limit=20, only_active=True):
179 # TODO: mikhail: move this method to the UserGroupModel.
182 # TODO: mikhail: move this method to the UserGroupModel.
180 query = self.sa.query(UserGroup)
183 query = self.sa.query(UserGroup)
181 if only_active:
184 if only_active:
182 query = query.filter(UserGroup.users_group_active == true())
185 query = query.filter(UserGroup.users_group_active == true())
183
186
184 if name_contains:
187 if name_contains:
185 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
188 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
186 query = query.filter(
189 query = query.filter(
187 UserGroup.users_group_name.ilike(ilike_expression))\
190 UserGroup.users_group_name.ilike(ilike_expression))\
188 .order_by(func.length(UserGroup.users_group_name))\
191 .order_by(func.length(UserGroup.users_group_name))\
189 .order_by(UserGroup.users_group_name)
192 .order_by(UserGroup.users_group_name)
190
193
191 query = query.limit(limit)
194 query = query.limit(limit)
192 user_groups = query.all()
195 user_groups = query.all()
193 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
196 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
194 user_groups = UserGroupList(user_groups, perm_set=perm_set)
197 user_groups = UserGroupList(user_groups, perm_set=perm_set)
195
198
196 _groups = [
199 _groups = [
197 {
200 {
198 'id': group.users_group_id,
201 'id': group.users_group_id,
199 # TODO: marcink figure out a way to generate the url for the
202 # TODO: marcink figure out a way to generate the url for the
200 # icon
203 # icon
201 'icon_link': '',
204 'icon_link': '',
202 'value_display': 'Group: %s (%d members)' % (
205 'value_display': 'Group: %s (%d members)' % (
203 group.users_group_name, len(group.members),),
206 group.users_group_name, len(group.members),),
204 'value': group.users_group_name,
207 'value': group.users_group_name,
205 'value_type': 'user_group',
208 'value_type': 'user_group',
206 'active': group.users_group_active,
209 'active': group.users_group_active,
207 }
210 }
208 for group in user_groups
211 for group in user_groups
209 ]
212 ]
210 return _groups
213 return _groups
211
214
212 @classmethod
215 @classmethod
213 def update_repoinfo(cls, repositories=None):
216 def update_repoinfo(cls, repositories=None):
214 if not repositories:
217 if not repositories:
215 repositories = Repository.getAll()
218 repositories = Repository.getAll()
216 for repo in repositories:
219 for repo in repositories:
217 repo.update_commit_cache()
220 repo.update_commit_cache()
218
221
219 def get_repos_as_dict(self, repo_list=None, admin=False,
222 def get_repos_as_dict(self, repo_list=None, admin=False,
220 super_user_actions=False):
223 super_user_actions=False):
221
224
222 from rhodecode.lib.utils import PartialRenderer
225 from rhodecode.lib.utils import PartialRenderer
223 _render = PartialRenderer('data_table/_dt_elements.html')
226 _render = PartialRenderer('data_table/_dt_elements.html')
224 c = _render.c
227 c = _render.c
225
228
226 def quick_menu(repo_name):
229 def quick_menu(repo_name):
227 return _render('quick_menu', repo_name)
230 return _render('quick_menu', repo_name)
228
231
229 def repo_lnk(name, rtype, rstate, private, fork_of):
232 def repo_lnk(name, rtype, rstate, private, fork_of):
230 return _render('repo_name', name, rtype, rstate, private, fork_of,
233 return _render('repo_name', name, rtype, rstate, private, fork_of,
231 short_name=not admin, admin=False)
234 short_name=not admin, admin=False)
232
235
233 def last_change(last_change):
236 def last_change(last_change):
234 return _render("last_change", last_change)
237 return _render("last_change", last_change)
235
238
236 def rss_lnk(repo_name):
239 def rss_lnk(repo_name):
237 return _render("rss", repo_name)
240 return _render("rss", repo_name)
238
241
239 def atom_lnk(repo_name):
242 def atom_lnk(repo_name):
240 return _render("atom", repo_name)
243 return _render("atom", repo_name)
241
244
242 def last_rev(repo_name, cs_cache):
245 def last_rev(repo_name, cs_cache):
243 return _render('revision', repo_name, cs_cache.get('revision'),
246 return _render('revision', repo_name, cs_cache.get('revision'),
244 cs_cache.get('raw_id'), cs_cache.get('author'),
247 cs_cache.get('raw_id'), cs_cache.get('author'),
245 cs_cache.get('message'))
248 cs_cache.get('message'))
246
249
247 def desc(desc):
250 def desc(desc):
248 if c.visual.stylify_metatags:
251 if c.visual.stylify_metatags:
249 return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
252 return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
250 else:
253 else:
251 return h.urlify_text(h.html_escape(h.truncate(desc, 60)))
254 return h.urlify_text(h.html_escape(h.truncate(desc, 60)))
252
255
253 def state(repo_state):
256 def state(repo_state):
254 return _render("repo_state", repo_state)
257 return _render("repo_state", repo_state)
255
258
256 def repo_actions(repo_name):
259 def repo_actions(repo_name):
257 return _render('repo_actions', repo_name, super_user_actions)
260 return _render('repo_actions', repo_name, super_user_actions)
258
261
259 def user_profile(username):
262 def user_profile(username):
260 return _render('user_profile', username)
263 return _render('user_profile', username)
261
264
262 repos_data = []
265 repos_data = []
263 for repo in repo_list:
266 for repo in repo_list:
264 cs_cache = repo.changeset_cache
267 cs_cache = repo.changeset_cache
265 row = {
268 row = {
266 "menu": quick_menu(repo.repo_name),
269 "menu": quick_menu(repo.repo_name),
267
270
268 "name": repo_lnk(repo.repo_name, repo.repo_type,
271 "name": repo_lnk(repo.repo_name, repo.repo_type,
269 repo.repo_state, repo.private, repo.fork),
272 repo.repo_state, repo.private, repo.fork),
270 "name_raw": repo.repo_name.lower(),
273 "name_raw": repo.repo_name.lower(),
271
274
272 "last_change": last_change(repo.last_db_change),
275 "last_change": last_change(repo.last_db_change),
273 "last_change_raw": datetime_to_time(repo.last_db_change),
276 "last_change_raw": datetime_to_time(repo.last_db_change),
274
277
275 "last_changeset": last_rev(repo.repo_name, cs_cache),
278 "last_changeset": last_rev(repo.repo_name, cs_cache),
276 "last_changeset_raw": cs_cache.get('revision'),
279 "last_changeset_raw": cs_cache.get('revision'),
277
280
278 "desc": desc(repo.description),
281 "desc": desc(repo.description),
279 "owner": user_profile(repo.user.username),
282 "owner": user_profile(repo.user.username),
280
283
281 "state": state(repo.repo_state),
284 "state": state(repo.repo_state),
282 "rss": rss_lnk(repo.repo_name),
285 "rss": rss_lnk(repo.repo_name),
283
286
284 "atom": atom_lnk(repo.repo_name),
287 "atom": atom_lnk(repo.repo_name),
285 }
288 }
286 if admin:
289 if admin:
287 row.update({
290 row.update({
288 "action": repo_actions(repo.repo_name),
291 "action": repo_actions(repo.repo_name),
289 })
292 })
290 repos_data.append(row)
293 repos_data.append(row)
291
294
292 return repos_data
295 return repos_data
293
296
294 def _get_defaults(self, repo_name):
297 def _get_defaults(self, repo_name):
295 """
298 """
296 Gets information about a repository and returns a dict for
299 Gets information about a repository and returns a dict for
297 use in forms
300 use in forms
298
301
299 :param repo_name:
302 :param repo_name:
300 """
303 """
301
304
302 repo_info = Repository.get_by_repo_name(repo_name)
305 repo_info = Repository.get_by_repo_name(repo_name)
303
306
304 if repo_info is None:
307 if repo_info is None:
305 return None
308 return None
306
309
307 defaults = repo_info.get_dict()
310 defaults = repo_info.get_dict()
308 defaults['repo_name'] = repo_info.just_name
311 defaults['repo_name'] = repo_info.just_name
309
312
310 groups = repo_info.groups_with_parents
313 groups = repo_info.groups_with_parents
311 parent_group = groups[-1] if groups else None
314 parent_group = groups[-1] if groups else None
312
315
313 # we use -1 because this is how we mark an empty group in HTML
316 # we use -1 because this is how we mark an empty group in HTML
314 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
317 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
315
318
316 keys_to_process = (
319 keys_to_process = (
317 {'k': 'repo_type', 'strip': False},
320 {'k': 'repo_type', 'strip': False},
318 {'k': 'repo_enable_downloads', 'strip': True},
321 {'k': 'repo_enable_downloads', 'strip': True},
319 {'k': 'repo_description', 'strip': True},
322 {'k': 'repo_description', 'strip': True},
320 {'k': 'repo_enable_locking', 'strip': True},
323 {'k': 'repo_enable_locking', 'strip': True},
321 {'k': 'repo_landing_rev', 'strip': True},
324 {'k': 'repo_landing_rev', 'strip': True},
322 {'k': 'clone_uri', 'strip': False},
325 {'k': 'clone_uri', 'strip': False},
323 {'k': 'repo_private', 'strip': True},
326 {'k': 'repo_private', 'strip': True},
324 {'k': 'repo_enable_statistics', 'strip': True}
327 {'k': 'repo_enable_statistics', 'strip': True}
325 )
328 )
326
329
327 for item in keys_to_process:
330 for item in keys_to_process:
328 attr = item['k']
331 attr = item['k']
329 if item['strip']:
332 if item['strip']:
330 attr = remove_prefix(item['k'], 'repo_')
333 attr = remove_prefix(item['k'], 'repo_')
331
334
332 val = defaults[attr]
335 val = defaults[attr]
333 if item['k'] == 'repo_landing_rev':
336 if item['k'] == 'repo_landing_rev':
334 val = ':'.join(defaults[attr])
337 val = ':'.join(defaults[attr])
335 defaults[item['k']] = val
338 defaults[item['k']] = val
336 if item['k'] == 'clone_uri':
339 if item['k'] == 'clone_uri':
337 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
340 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
338
341
339 # fill owner
342 # fill owner
340 if repo_info.user:
343 if repo_info.user:
341 defaults.update({'user': repo_info.user.username})
344 defaults.update({'user': repo_info.user.username})
342 else:
345 else:
343 replacement_user = User.get_first_super_admin().username
346 replacement_user = User.get_first_super_admin().username
344 defaults.update({'user': replacement_user})
347 defaults.update({'user': replacement_user})
345
348
346 # fill repository users
349 # fill repository users
347 for p in repo_info.repo_to_perm:
350 for p in repo_info.repo_to_perm:
348 defaults.update({'u_perm_%s' % p.user.user_id:
351 defaults.update({'u_perm_%s' % p.user.user_id:
349 p.permission.permission_name})
352 p.permission.permission_name})
350
353
351 # fill repository groups
354 # fill repository groups
352 for p in repo_info.users_group_to_perm:
355 for p in repo_info.users_group_to_perm:
353 defaults.update({'g_perm_%s' % p.users_group.users_group_id:
356 defaults.update({'g_perm_%s' % p.users_group.users_group_id:
354 p.permission.permission_name})
357 p.permission.permission_name})
355
358
356 return defaults
359 return defaults
357
360
358 def update(self, repo, **kwargs):
361 def update(self, repo, **kwargs):
359 try:
362 try:
360 cur_repo = self._get_repo(repo)
363 cur_repo = self._get_repo(repo)
361 source_repo_name = cur_repo.repo_name
364 source_repo_name = cur_repo.repo_name
362 if 'user' in kwargs:
365 if 'user' in kwargs:
363 cur_repo.user = User.get_by_username(kwargs['user'])
366 cur_repo.user = User.get_by_username(kwargs['user'])
364
367
365 if 'repo_group' in kwargs:
368 if 'repo_group' in kwargs:
366 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
369 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
367 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
370 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
368
371
369 update_keys = [
372 update_keys = [
370 (1, 'repo_enable_downloads'),
373 (1, 'repo_enable_downloads'),
371 (1, 'repo_description'),
374 (1, 'repo_description'),
372 (1, 'repo_enable_locking'),
375 (1, 'repo_enable_locking'),
373 (1, 'repo_landing_rev'),
376 (1, 'repo_landing_rev'),
374 (1, 'repo_private'),
377 (1, 'repo_private'),
375 (1, 'repo_enable_statistics'),
378 (1, 'repo_enable_statistics'),
376 (0, 'clone_uri'),
379 (0, 'clone_uri'),
377 (0, 'fork_id')
380 (0, 'fork_id')
378 ]
381 ]
379 for strip, k in update_keys:
382 for strip, k in update_keys:
380 if k in kwargs:
383 if k in kwargs:
381 val = kwargs[k]
384 val = kwargs[k]
382 if strip:
385 if strip:
383 k = remove_prefix(k, 'repo_')
386 k = remove_prefix(k, 'repo_')
384 if k == 'clone_uri':
387 if k == 'clone_uri':
385 from rhodecode.model.validators import Missing
388 from rhodecode.model.validators import Missing
386 _change = kwargs.get('clone_uri_change')
389 _change = kwargs.get('clone_uri_change')
387 if _change in [Missing, 'OLD']:
390 if _change in [Missing, 'OLD']:
388 # we don't change the value, so use original one
391 # we don't change the value, so use original one
389 val = cur_repo.clone_uri
392 val = cur_repo.clone_uri
390
393
391 setattr(cur_repo, k, val)
394 setattr(cur_repo, k, val)
392
395
393 new_name = cur_repo.get_new_name(kwargs['repo_name'])
396 new_name = cur_repo.get_new_name(kwargs['repo_name'])
394 cur_repo.repo_name = new_name
397 cur_repo.repo_name = new_name
395
398
396 # if private flag is set, reset default permission to NONE
399 # if private flag is set, reset default permission to NONE
397 if kwargs.get('repo_private'):
400 if kwargs.get('repo_private'):
398 EMPTY_PERM = 'repository.none'
401 EMPTY_PERM = 'repository.none'
399 RepoModel().grant_user_permission(
402 RepoModel().grant_user_permission(
400 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
403 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
401 )
404 )
402
405
403 # handle extra fields
406 # handle extra fields
404 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
407 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
405 kwargs):
408 kwargs):
406 k = RepositoryField.un_prefix_key(field)
409 k = RepositoryField.un_prefix_key(field)
407 ex_field = RepositoryField.get_by_key_name(
410 ex_field = RepositoryField.get_by_key_name(
408 key=k, repo=cur_repo)
411 key=k, repo=cur_repo)
409 if ex_field:
412 if ex_field:
410 ex_field.field_value = kwargs[field]
413 ex_field.field_value = kwargs[field]
411 self.sa.add(ex_field)
414 self.sa.add(ex_field)
412 self.sa.add(cur_repo)
415 self.sa.add(cur_repo)
413
416
414 if source_repo_name != new_name:
417 if source_repo_name != new_name:
415 # rename repository
418 # rename repository
416 self._rename_filesystem_repo(
419 self._rename_filesystem_repo(
417 old=source_repo_name, new=new_name)
420 old=source_repo_name, new=new_name)
418
421
419 return cur_repo
422 return cur_repo
420 except Exception:
423 except Exception:
421 log.error(traceback.format_exc())
424 log.error(traceback.format_exc())
422 raise
425 raise
423
426
424 def _create_repo(self, repo_name, repo_type, description, owner,
427 def _create_repo(self, repo_name, repo_type, description, owner,
425 private=False, clone_uri=None, repo_group=None,
428 private=False, clone_uri=None, repo_group=None,
426 landing_rev='rev:tip', fork_of=None,
429 landing_rev='rev:tip', fork_of=None,
427 copy_fork_permissions=False, enable_statistics=False,
430 copy_fork_permissions=False, enable_statistics=False,
428 enable_locking=False, enable_downloads=False,
431 enable_locking=False, enable_downloads=False,
429 copy_group_permissions=False,
432 copy_group_permissions=False,
430 state=Repository.STATE_PENDING):
433 state=Repository.STATE_PENDING):
431 """
434 """
432 Create a repository inside the database with PENDING state; this should
435 Create a repository inside the database with PENDING state; this should
433 be executed only by create(), with the exception of importing existing
436 be executed only by create(), with the exception of importing existing
434 repositories.
437 repositories.
435 """
438 """
436 from rhodecode.model.scm import ScmModel
439 from rhodecode.model.scm import ScmModel
437
440
438 owner = self._get_user(owner)
441 owner = self._get_user(owner)
439 fork_of = self._get_repo(fork_of)
442 fork_of = self._get_repo(fork_of)
440 repo_group = self._get_repo_group(safe_int(repo_group))
443 repo_group = self._get_repo_group(safe_int(repo_group))
441
444
442 try:
445 try:
443 repo_name = safe_unicode(repo_name)
446 repo_name = safe_unicode(repo_name)
444 description = safe_unicode(description)
447 description = safe_unicode(description)
445 # repo name is just a name of repository
448 # repo name is just a name of repository
446 # while repo_name_full is a fully qualified name that is combined
449 # while repo_name_full is a fully qualified name that is combined
447 # with name and path of group
450 # with name and path of group
448 repo_name_full = repo_name
451 repo_name_full = repo_name
449 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
452 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
450
453
451 new_repo = Repository()
454 new_repo = Repository()
452 new_repo.repo_state = state
455 new_repo.repo_state = state
453 new_repo.enable_statistics = False
456 new_repo.enable_statistics = False
454 new_repo.repo_name = repo_name_full
457 new_repo.repo_name = repo_name_full
455 new_repo.repo_type = repo_type
458 new_repo.repo_type = repo_type
456 new_repo.user = owner
459 new_repo.user = owner
457 new_repo.group = repo_group
460 new_repo.group = repo_group
458 new_repo.description = description or repo_name
461 new_repo.description = description or repo_name
459 new_repo.private = private
462 new_repo.private = private
460 new_repo.clone_uri = clone_uri
463 new_repo.clone_uri = clone_uri
461 new_repo.landing_rev = landing_rev
464 new_repo.landing_rev = landing_rev
462
465
463 new_repo.enable_statistics = enable_statistics
466 new_repo.enable_statistics = enable_statistics
464 new_repo.enable_locking = enable_locking
467 new_repo.enable_locking = enable_locking
465 new_repo.enable_downloads = enable_downloads
468 new_repo.enable_downloads = enable_downloads
466
469
467 if repo_group:
470 if repo_group:
468 new_repo.enable_locking = repo_group.enable_locking
471 new_repo.enable_locking = repo_group.enable_locking
469
472
470 if fork_of:
473 if fork_of:
471 parent_repo = fork_of
474 parent_repo = fork_of
472 new_repo.fork = parent_repo
475 new_repo.fork = parent_repo
473
476
474 events.trigger(events.RepoPreCreateEvent(new_repo))
477 events.trigger(events.RepoPreCreateEvent(new_repo))
475
478
476 self.sa.add(new_repo)
479 self.sa.add(new_repo)
477
480
478 EMPTY_PERM = 'repository.none'
481 EMPTY_PERM = 'repository.none'
479 if fork_of and copy_fork_permissions:
482 if fork_of and copy_fork_permissions:
480 repo = fork_of
483 repo = fork_of
481 user_perms = UserRepoToPerm.query() \
484 user_perms = UserRepoToPerm.query() \
482 .filter(UserRepoToPerm.repository == repo).all()
485 .filter(UserRepoToPerm.repository == repo).all()
483 group_perms = UserGroupRepoToPerm.query() \
486 group_perms = UserGroupRepoToPerm.query() \
484 .filter(UserGroupRepoToPerm.repository == repo).all()
487 .filter(UserGroupRepoToPerm.repository == repo).all()
485
488
486 for perm in user_perms:
489 for perm in user_perms:
487 UserRepoToPerm.create(
490 UserRepoToPerm.create(
488 perm.user, new_repo, perm.permission)
491 perm.user, new_repo, perm.permission)
489
492
490 for perm in group_perms:
493 for perm in group_perms:
491 UserGroupRepoToPerm.create(
494 UserGroupRepoToPerm.create(
492 perm.users_group, new_repo, perm.permission)
495 perm.users_group, new_repo, perm.permission)
493 # in case we copy permissions and also set this repo to private
496 # in case we copy permissions and also set this repo to private
494 # override the default user permission to make it a private
497 # override the default user permission to make it a private
495 # repo
498 # repo
496 if private:
499 if private:
497 RepoModel(self.sa).grant_user_permission(
500 RepoModel(self.sa).grant_user_permission(
498 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
501 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
499
502
500 elif repo_group and copy_group_permissions:
503 elif repo_group and copy_group_permissions:
501 user_perms = UserRepoGroupToPerm.query() \
504 user_perms = UserRepoGroupToPerm.query() \
502 .filter(UserRepoGroupToPerm.group == repo_group).all()
505 .filter(UserRepoGroupToPerm.group == repo_group).all()
503
506
504 group_perms = UserGroupRepoGroupToPerm.query() \
507 group_perms = UserGroupRepoGroupToPerm.query() \
505 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
508 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
506
509
507 for perm in user_perms:
510 for perm in user_perms:
508 perm_name = perm.permission.permission_name.replace(
511 perm_name = perm.permission.permission_name.replace(
509 'group.', 'repository.')
512 'group.', 'repository.')
510 perm_obj = Permission.get_by_key(perm_name)
513 perm_obj = Permission.get_by_key(perm_name)
511 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
514 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
512
515
513 for perm in group_perms:
516 for perm in group_perms:
514 perm_name = perm.permission.permission_name.replace(
517 perm_name = perm.permission.permission_name.replace(
515 'group.', 'repository.')
518 'group.', 'repository.')
516 perm_obj = Permission.get_by_key(perm_name)
519 perm_obj = Permission.get_by_key(perm_name)
517 UserGroupRepoToPerm.create(
520 UserGroupRepoToPerm.create(
518 perm.users_group, new_repo, perm_obj)
521 perm.users_group, new_repo, perm_obj)
519
522
520 if private:
523 if private:
521 RepoModel(self.sa).grant_user_permission(
524 RepoModel(self.sa).grant_user_permission(
522 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
525 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
523
526
524 else:
527 else:
525 perm_obj = self._create_default_perms(new_repo, private)
528 perm_obj = self._create_default_perms(new_repo, private)
526 self.sa.add(perm_obj)
529 self.sa.add(perm_obj)
527
530
528 # now automatically start following this repository as owner
531 # now automatically start following this repository as owner
529 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
532 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
530 owner.user_id)
533 owner.user_id)
531
534
532 # we need to flush here to check that the database won't
535 # we need to flush here to check that the database won't
533 # throw any exceptions; filesystem dirs are created at the very end
536 # throw any exceptions; filesystem dirs are created at the very end
534 self.sa.flush()
537 self.sa.flush()
535 events.trigger(events.RepoCreatedEvent(new_repo))
538 events.trigger(events.RepoCreatedEvent(new_repo))
536 return new_repo
539 return new_repo
537
540
538 except Exception:
541 except Exception:
539 log.error(traceback.format_exc())
542 log.error(traceback.format_exc())
540 raise
543 raise
541
544
542 def create(self, form_data, cur_user):
545 def create(self, form_data, cur_user):
543 """
546 """
544 Create repository using celery tasks
547 Create repository using celery tasks
545
548
546 :param form_data:
549 :param form_data:
547 :param cur_user:
550 :param cur_user:
548 """
551 """
549 from rhodecode.lib.celerylib import tasks, run_task
552 from rhodecode.lib.celerylib import tasks, run_task
550 return run_task(tasks.create_repo, form_data, cur_user)
553 return run_task(tasks.create_repo, form_data, cur_user)
551
554
552 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
555 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
553 perm_deletions=None, check_perms=True,
556 perm_deletions=None, check_perms=True,
554 cur_user=None):
557 cur_user=None):
555 if not perm_additions:
558 if not perm_additions:
556 perm_additions = []
559 perm_additions = []
557 if not perm_updates:
560 if not perm_updates:
558 perm_updates = []
561 perm_updates = []
559 if not perm_deletions:
562 if not perm_deletions:
560 perm_deletions = []
563 perm_deletions = []
561
564
562 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
565 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
563
566
564 # update permissions
567 # update permissions
565 for member_id, perm, member_type in perm_updates:
568 for member_id, perm, member_type in perm_updates:
566 member_id = int(member_id)
569 member_id = int(member_id)
567 if member_type == 'user':
570 if member_type == 'user':
568 # this updates also current one if found
571 # this updates also current one if found
569 self.grant_user_permission(
572 self.grant_user_permission(
570 repo=repo, user=member_id, perm=perm)
573 repo=repo, user=member_id, perm=perm)
571 else: # set for user group
574 else: # set for user group
572 # check if we have permissions to alter this usergroup
575 # check if we have permissions to alter this usergroup
573 member_name = UserGroup.get(member_id).users_group_name
576 member_name = UserGroup.get(member_id).users_group_name
574 if not check_perms or HasUserGroupPermissionAny(
577 if not check_perms or HasUserGroupPermissionAny(
575 *req_perms)(member_name, user=cur_user):
578 *req_perms)(member_name, user=cur_user):
576 self.grant_user_group_permission(
579 self.grant_user_group_permission(
577 repo=repo, group_name=member_id, perm=perm)
580 repo=repo, group_name=member_id, perm=perm)
578
581
579 # set new permissions
582 # set new permissions
580 for member_id, perm, member_type in perm_additions:
583 for member_id, perm, member_type in perm_additions:
581 member_id = int(member_id)
584 member_id = int(member_id)
582 if member_type == 'user':
585 if member_type == 'user':
583 self.grant_user_permission(
586 self.grant_user_permission(
584 repo=repo, user=member_id, perm=perm)
587 repo=repo, user=member_id, perm=perm)
585 else: # set for user group
588 else: # set for user group
586 # check if we have permissions to alter this usergroup
589 # check if we have permissions to alter this usergroup
587 member_name = UserGroup.get(member_id).users_group_name
590 member_name = UserGroup.get(member_id).users_group_name
588 if not check_perms or HasUserGroupPermissionAny(
591 if not check_perms or HasUserGroupPermissionAny(
589 *req_perms)(member_name, user=cur_user):
592 *req_perms)(member_name, user=cur_user):
590 self.grant_user_group_permission(
593 self.grant_user_group_permission(
591 repo=repo, group_name=member_id, perm=perm)
594 repo=repo, group_name=member_id, perm=perm)
592
595
593 # delete permissions
596 # delete permissions
594 for member_id, perm, member_type in perm_deletions:
597 for member_id, perm, member_type in perm_deletions:
595 member_id = int(member_id)
598 member_id = int(member_id)
596 if member_type == 'user':
599 if member_type == 'user':
597 self.revoke_user_permission(repo=repo, user=member_id)
600 self.revoke_user_permission(repo=repo, user=member_id)
598 else: # set for user group
601 else: # set for user group
599 # check if we have permissions to alter this usergroup
602 # check if we have permissions to alter this usergroup
600 member_name = UserGroup.get(member_id).users_group_name
603 member_name = UserGroup.get(member_id).users_group_name
601 if not check_perms or HasUserGroupPermissionAny(
604 if not check_perms or HasUserGroupPermissionAny(
602 *req_perms)(member_name, user=cur_user):
605 *req_perms)(member_name, user=cur_user):
603 self.revoke_user_group_permission(
606 self.revoke_user_group_permission(
604 repo=repo, group_name=member_id)
607 repo=repo, group_name=member_id)
605
608
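# Editor's sketch (not part of the changeset): the shape of the permission
# lists that update_permissions() above expects. Each entry is a
# (member_id, permission_name, member_type) tuple; the ids, the repo name,
# the 'repository.read' / 'repository.write' names and the import path
# rhodecode.model.repo are assumed example values.
from rhodecode.model.repo import RepoModel

additions = [(2, 'repository.read', 'user')]         # new grant for user id 2
updates = [(5, 'repository.write', 'user_group')]    # change perm of group id 5
deletions = [(7, 'repository.read', 'user')]         # revoke from user id 7
RepoModel().update_permissions(
    repo='example-repo', perm_additions=additions,
    perm_updates=updates, perm_deletions=deletions, cur_user='admin')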
606 def create_fork(self, form_data, cur_user):
609 def create_fork(self, form_data, cur_user):
607 """
610 """
608 Simple wrapper into executing celery task for fork creation
611 Simple wrapper into executing celery task for fork creation
609
612
610 :param form_data:
613 :param form_data:
611 :param cur_user:
614 :param cur_user:
612 """
615 """
613 from rhodecode.lib.celerylib import tasks, run_task
616 from rhodecode.lib.celerylib import tasks, run_task
614 return run_task(tasks.create_repo_fork, form_data, cur_user)
617 return run_task(tasks.create_repo_fork, form_data, cur_user)
615
618
616 def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
619 def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
617 """
620 """
618 Delete the given repository; the forks parameter defines what to do with
621 Delete the given repository; the forks parameter defines what to do with
619 attached forks. Throws AttachedForksError if the deleted repo has attached
622 attached forks. Throws AttachedForksError if the deleted repo has attached
620 forks.
623 forks.
621
624
622 :param repo:
625 :param repo:
623 :param forks: str 'delete' or 'detach'
626 :param forks: str 'delete' or 'detach'
624 :param fs_remove: remove(archive) repo from filesystem
627 :param fs_remove: remove(archive) repo from filesystem
625 """
628 """
626 if not cur_user:
629 if not cur_user:
627 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
630 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
628 repo = self._get_repo(repo)
631 repo = self._get_repo(repo)
629 if repo:
632 if repo:
630 if forks == 'detach':
633 if forks == 'detach':
631 for r in repo.forks:
634 for r in repo.forks:
632 r.fork = None
635 r.fork = None
633 self.sa.add(r)
636 self.sa.add(r)
634 elif forks == 'delete':
637 elif forks == 'delete':
635 for r in repo.forks:
638 for r in repo.forks:
636 self.delete(r, forks='delete')
639 self.delete(r, forks='delete')
637 elif [f for f in repo.forks]:
640 elif [f for f in repo.forks]:
638 raise AttachedForksError()
641 raise AttachedForksError()
639
642
640 old_repo_dict = repo.get_dict()
643 old_repo_dict = repo.get_dict()
641 events.trigger(events.RepoPreDeleteEvent(repo))
644 events.trigger(events.RepoPreDeleteEvent(repo))
642 try:
645 try:
643 self.sa.delete(repo)
646 self.sa.delete(repo)
644 if fs_remove:
647 if fs_remove:
645 self._delete_filesystem_repo(repo)
648 self._delete_filesystem_repo(repo)
646 else:
649 else:
647 log.debug('skipping removal from filesystem')
650 log.debug('skipping removal from filesystem')
648 old_repo_dict.update({
651 old_repo_dict.update({
649 'deleted_by': cur_user,
652 'deleted_by': cur_user,
650 'deleted_on': time.time(),
653 'deleted_on': time.time(),
651 })
654 })
652 log_delete_repository(**old_repo_dict)
655 log_delete_repository(**old_repo_dict)
653 events.trigger(events.RepoDeletedEvent(repo))
656 events.trigger(events.RepoDeletedEvent(repo))
654 except Exception:
657 except Exception:
655 log.error(traceback.format_exc())
658 log.error(traceback.format_exc())
656 raise
659 raise
657
660
658 def grant_user_permission(self, repo, user, perm):
661 def grant_user_permission(self, repo, user, perm):
659 """
662 """
660 Grant permission for user on given repository, or update existing one
663 Grant permission for user on given repository, or update existing one
661 if found
664 if found
662
665
663 :param repo: Instance of Repository, repository_id, or repository name
666 :param repo: Instance of Repository, repository_id, or repository name
664 :param user: Instance of User, user_id or username
667 :param user: Instance of User, user_id or username
665 :param perm: Instance of Permission, or permission_name
668 :param perm: Instance of Permission, or permission_name
666 """
669 """
667 user = self._get_user(user)
670 user = self._get_user(user)
668 repo = self._get_repo(repo)
671 repo = self._get_repo(repo)
669 permission = self._get_perm(perm)
672 permission = self._get_perm(perm)
670
673
671 # check if we have that permission already
674 # check if we have that permission already
672 obj = self.sa.query(UserRepoToPerm) \
675 obj = self.sa.query(UserRepoToPerm) \
673 .filter(UserRepoToPerm.user == user) \
676 .filter(UserRepoToPerm.user == user) \
674 .filter(UserRepoToPerm.repository == repo) \
677 .filter(UserRepoToPerm.repository == repo) \
675 .scalar()
678 .scalar()
676 if obj is None:
679 if obj is None:
677 # create new !
680 # create new !
678 obj = UserRepoToPerm()
681 obj = UserRepoToPerm()
679 obj.repository = repo
682 obj.repository = repo
680 obj.user = user
683 obj.user = user
681 obj.permission = permission
684 obj.permission = permission
682 self.sa.add(obj)
685 self.sa.add(obj)
683 log.debug('Granted perm %s to %s on %s', perm, user, repo)
686 log.debug('Granted perm %s to %s on %s', perm, user, repo)
684 action_logger_generic(
687 action_logger_generic(
685 'granted permission: {} to user: {} on repo: {}'.format(
688 'granted permission: {} to user: {} on repo: {}'.format(
686 perm, user, repo), namespace='security.repo')
689 perm, user, repo), namespace='security.repo')
687 return obj
690 return obj
688
691
689 def revoke_user_permission(self, repo, user):
692 def revoke_user_permission(self, repo, user):
690 """
693 """
691 Revoke permission for user on given repository
694 Revoke permission for user on given repository
692
695
693 :param repo: Instance of Repository, repository_id, or repository name
696 :param repo: Instance of Repository, repository_id, or repository name
694 :param user: Instance of User, user_id or username
697 :param user: Instance of User, user_id or username
695 """
698 """
696
699
697 user = self._get_user(user)
700 user = self._get_user(user)
698 repo = self._get_repo(repo)
701 repo = self._get_repo(repo)
699
702
700 obj = self.sa.query(UserRepoToPerm) \
703 obj = self.sa.query(UserRepoToPerm) \
701 .filter(UserRepoToPerm.repository == repo) \
704 .filter(UserRepoToPerm.repository == repo) \
702 .filter(UserRepoToPerm.user == user) \
705 .filter(UserRepoToPerm.user == user) \
703 .scalar()
706 .scalar()
704 if obj:
707 if obj:
705 self.sa.delete(obj)
708 self.sa.delete(obj)
706 log.debug('Revoked perm on %s on %s', repo, user)
709 log.debug('Revoked perm on %s on %s', repo, user)
707 action_logger_generic(
710 action_logger_generic(
708 'revoked permission from user: {} on repo: {}'.format(
711 'revoked permission from user: {} on repo: {}'.format(
709 user, repo), namespace='security.repo')
712 user, repo), namespace='security.repo')
710
713
711 def grant_user_group_permission(self, repo, group_name, perm):
714 def grant_user_group_permission(self, repo, group_name, perm):
712 """
715 """
713 Grant permission for user group on given repository, or update
716 Grant permission for user group on given repository, or update
714 existing one if found
717 existing one if found
715
718
716 :param repo: Instance of Repository, repository_id, or repository name
719 :param repo: Instance of Repository, repository_id, or repository name
717 :param group_name: Instance of UserGroup, users_group_id,
720 :param group_name: Instance of UserGroup, users_group_id,
718 or user group name
721 or user group name
719 :param perm: Instance of Permission, or permission_name
722 :param perm: Instance of Permission, or permission_name
720 """
723 """
721 repo = self._get_repo(repo)
724 repo = self._get_repo(repo)
722 group_name = self._get_user_group(group_name)
725 group_name = self._get_user_group(group_name)
723 permission = self._get_perm(perm)
726 permission = self._get_perm(perm)
724
727
725 # check if we have that permission already
728 # check if we have that permission already
726 obj = self.sa.query(UserGroupRepoToPerm) \
729 obj = self.sa.query(UserGroupRepoToPerm) \
727 .filter(UserGroupRepoToPerm.users_group == group_name) \
730 .filter(UserGroupRepoToPerm.users_group == group_name) \
728 .filter(UserGroupRepoToPerm.repository == repo) \
731 .filter(UserGroupRepoToPerm.repository == repo) \
729 .scalar()
732 .scalar()
730
733
731 if obj is None:
734 if obj is None:
732 # create new
735 # create new
733 obj = UserGroupRepoToPerm()
736 obj = UserGroupRepoToPerm()
734
737
735 obj.repository = repo
738 obj.repository = repo
736 obj.users_group = group_name
739 obj.users_group = group_name
737 obj.permission = permission
740 obj.permission = permission
738 self.sa.add(obj)
741 self.sa.add(obj)
739 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
742 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
740 action_logger_generic(
743 action_logger_generic(
741 'granted permission: {} to usergroup: {} on repo: {}'.format(
744 'granted permission: {} to usergroup: {} on repo: {}'.format(
742 perm, group_name, repo), namespace='security.repo')
745 perm, group_name, repo), namespace='security.repo')
743
746
744 return obj
747 return obj
745
748
746 def revoke_user_group_permission(self, repo, group_name):
749 def revoke_user_group_permission(self, repo, group_name):
747 """
750 """
748 Revoke permission for user group on given repository
751 Revoke permission for user group on given repository
749
752
750 :param repo: Instance of Repository, repository_id, or repository name
753 :param repo: Instance of Repository, repository_id, or repository name
751 :param group_name: Instance of UserGroup, users_group_id,
754 :param group_name: Instance of UserGroup, users_group_id,
752 or user group name
755 or user group name
753 """
756 """
754 repo = self._get_repo(repo)
757 repo = self._get_repo(repo)
755 group_name = self._get_user_group(group_name)
758 group_name = self._get_user_group(group_name)
756
759
757 obj = self.sa.query(UserGroupRepoToPerm) \
760 obj = self.sa.query(UserGroupRepoToPerm) \
758 .filter(UserGroupRepoToPerm.repository == repo) \
761 .filter(UserGroupRepoToPerm.repository == repo) \
759 .filter(UserGroupRepoToPerm.users_group == group_name) \
762 .filter(UserGroupRepoToPerm.users_group == group_name) \
760 .scalar()
763 .scalar()
761 if obj:
764 if obj:
762 self.sa.delete(obj)
765 self.sa.delete(obj)
763 log.debug('Revoked perm to %s on %s', repo, group_name)
766 log.debug('Revoked perm to %s on %s', repo, group_name)
764 action_logger_generic(
767 action_logger_generic(
765 'revoked permission from usergroup: {} on repo: {}'.format(
768 'revoked permission from usergroup: {} on repo: {}'.format(
766 group_name, repo), namespace='security.repo')
769 group_name, repo), namespace='security.repo')
767
770
768 def delete_stats(self, repo_name):
771 def delete_stats(self, repo_name):
769 """
772 """
770 removes stats for given repo
773 removes stats for given repo
771
774
772 :param repo_name:
775 :param repo_name:
773 """
776 """
774 repo = self._get_repo(repo_name)
777 repo = self._get_repo(repo_name)
775 try:
778 try:
776 obj = self.sa.query(Statistics) \
779 obj = self.sa.query(Statistics) \
777 .filter(Statistics.repository == repo).scalar()
780 .filter(Statistics.repository == repo).scalar()
778 if obj:
781 if obj:
779 self.sa.delete(obj)
782 self.sa.delete(obj)
780 except Exception:
783 except Exception:
781 log.error(traceback.format_exc())
784 log.error(traceback.format_exc())
782 raise
785 raise
783
786
784 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
787 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
785 field_type='str', field_desc=''):
788 field_type='str', field_desc=''):
786
789
787 repo = self._get_repo(repo_name)
790 repo = self._get_repo(repo_name)
788
791
789 new_field = RepositoryField()
792 new_field = RepositoryField()
790 new_field.repository = repo
793 new_field.repository = repo
791 new_field.field_key = field_key
794 new_field.field_key = field_key
792 new_field.field_type = field_type # python type
795 new_field.field_type = field_type # python type
793 new_field.field_value = field_value
796 new_field.field_value = field_value
794 new_field.field_desc = field_desc
797 new_field.field_desc = field_desc
795 new_field.field_label = field_label
798 new_field.field_label = field_label
796 self.sa.add(new_field)
799 self.sa.add(new_field)
797 return new_field
800 return new_field
798
801
799 def delete_repo_field(self, repo_name, field_key):
802 def delete_repo_field(self, repo_name, field_key):
800 repo = self._get_repo(repo_name)
803 repo = self._get_repo(repo_name)
801 field = RepositoryField.get_by_key_name(field_key, repo)
804 field = RepositoryField.get_by_key_name(field_key, repo)
802 if field:
805 if field:
803 self.sa.delete(field)
806 self.sa.delete(field)
804
807
805 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
808 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
806 clone_uri=None, repo_store_location=None,
809 clone_uri=None, repo_store_location=None,
807 use_global_config=False):
810 use_global_config=False):
808 """
811 """
809 Makes a repository on the filesystem. It is group aware, meaning it will
812 Makes a repository on the filesystem. It is group aware, meaning it will
810 create a repository within a group and alter the paths according to the
813 create a repository within a group and alter the paths according to the
811 group location.
814 group location.
812
815
813 :param repo_name:
816 :param repo_name:
814 :param alias:
817 :param alias:
815 :param parent:
818 :param parent:
816 :param clone_uri:
819 :param clone_uri:
817 :param repo_store_location:
820 :param repo_store_location:
818 """
821 """
819 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
822 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
820 from rhodecode.model.scm import ScmModel
823 from rhodecode.model.scm import ScmModel
821
824
822 if Repository.NAME_SEP in repo_name:
825 if Repository.NAME_SEP in repo_name:
823 raise ValueError(
826 raise ValueError(
824 'repo_name must not contain groups got `%s`' % repo_name)
827 'repo_name must not contain groups got `%s`' % repo_name)
825
828
826 if isinstance(repo_group, RepoGroup):
829 if isinstance(repo_group, RepoGroup):
827 new_parent_path = os.sep.join(repo_group.full_path_splitted)
830 new_parent_path = os.sep.join(repo_group.full_path_splitted)
828 else:
831 else:
829 new_parent_path = repo_group or ''
832 new_parent_path = repo_group or ''
830
833
831 if repo_store_location:
834 if repo_store_location:
832 _paths = [repo_store_location]
835 _paths = [repo_store_location]
833 else:
836 else:
834 _paths = [self.repos_path, new_parent_path, repo_name]
837 _paths = [self.repos_path, new_parent_path, repo_name]
835 # we need to make it str for mercurial
838 # we need to make it str for mercurial
836 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
839 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
837
840
838 # check if this path is not a repository
841 # check if this path is not a repository
839 if is_valid_repo(repo_path, self.repos_path):
842 if is_valid_repo(repo_path, self.repos_path):
840 raise Exception('This path %s is a valid repository' % repo_path)
843 raise Exception('This path %s is a valid repository' % repo_path)
841
844
842 # check if this path is a group
845 # check if this path is a group
843 if is_valid_repo_group(repo_path, self.repos_path):
846 if is_valid_repo_group(repo_path, self.repos_path):
844 raise Exception('This path %s is a valid group' % repo_path)
847 raise Exception('This path %s is a valid group' % repo_path)
845
848
846 log.info('creating repo %s in %s from url: `%s`',
849 log.info('creating repo %s in %s from url: `%s`',
847 repo_name, safe_unicode(repo_path),
850 repo_name, safe_unicode(repo_path),
848 obfuscate_url_pw(clone_uri))
851 obfuscate_url_pw(clone_uri))
849
852
850 backend = get_backend(repo_type)
853 backend = get_backend(repo_type)
851
854
852 config_repo = None if use_global_config else repo_name
855 config_repo = None if use_global_config else repo_name
853 if config_repo and new_parent_path:
856 if config_repo and new_parent_path:
854 config_repo = Repository.NAME_SEP.join(
857 config_repo = Repository.NAME_SEP.join(
855 (new_parent_path, config_repo))
858 (new_parent_path, config_repo))
856 config = make_db_config(clear_session=False, repo=config_repo)
859 config = make_db_config(clear_session=False, repo=config_repo)
857 config.set('extensions', 'largefiles', '')
860 config.set('extensions', 'largefiles', '')
858
861
859 # patch and reset hooks section of UI config to not run any
862 # patch and reset hooks section of UI config to not run any
860 # hooks on creating remote repo
863 # hooks on creating remote repo
861 config.clear_section('hooks')
864 config.clear_section('hooks')
862
865
863 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
866 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
864 if repo_type == 'git':
867 if repo_type == 'git':
865 repo = backend(
868 repo = backend(
866 repo_path, config=config, create=True, src_url=clone_uri,
869 repo_path, config=config, create=True, src_url=clone_uri,
867 bare=True)
870 bare=True)
868 else:
871 else:
869 repo = backend(
872 repo = backend(
870 repo_path, config=config, create=True, src_url=clone_uri)
873 repo_path, config=config, create=True, src_url=clone_uri)
871
874
872 ScmModel().install_hooks(repo, repo_type=repo_type)
875 ScmModel().install_hooks(repo, repo_type=repo_type)
873
876
874 log.debug('Created repo %s with %s backend',
877 log.debug('Created repo %s with %s backend',
875 safe_unicode(repo_name), safe_unicode(repo_type))
878 safe_unicode(repo_name), safe_unicode(repo_type))
876 return repo
879 return repo
877
880
878 def _rename_filesystem_repo(self, old, new):
881 def _rename_filesystem_repo(self, old, new):
879 """
882 """
880 renames repository on filesystem
883 renames repository on filesystem
881
884
882 :param old: old name
885 :param old: old name
883 :param new: new name
886 :param new: new name
884 """
887 """
885 log.info('renaming repo from %s to %s', old, new)
888 log.info('renaming repo from %s to %s', old, new)
886
889
887 old_path = os.path.join(self.repos_path, old)
890 old_path = os.path.join(self.repos_path, old)
888 new_path = os.path.join(self.repos_path, new)
891 new_path = os.path.join(self.repos_path, new)
889 if os.path.isdir(new_path):
892 if os.path.isdir(new_path):
890 raise Exception(
893 raise Exception(
891 'Was trying to rename to already existing dir %s' % new_path
894 'Was trying to rename to already existing dir %s' % new_path
892 )
895 )
893 shutil.move(old_path, new_path)
896 shutil.move(old_path, new_path)
894
897
895 def _delete_filesystem_repo(self, repo):
898 def _delete_filesystem_repo(self, repo):
896 """
899 """
897 removes repo from filesystem, the removal is acctually made by
900 removes repo from filesystem, the removal is acctually made by
898 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
901 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
899 repository is no longer valid for rhodecode, can be undeleted later on
902 repository is no longer valid for rhodecode, can be undeleted later on
900 by reverting the renames on this repository
903 by reverting the renames on this repository
901
904
902 :param repo: repo object
905 :param repo: repo object
903 """
906 """
904 rm_path = os.path.join(self.repos_path, repo.repo_name)
907 rm_path = os.path.join(self.repos_path, repo.repo_name)
905 repo_group = repo.group
908 repo_group = repo.group
906 log.info("Removing repository %s", rm_path)
909 log.info("Removing repository %s", rm_path)
907 # disable hg/git internals so the dir doesn't get detected as a repo
910 # disable hg/git internals so the dir doesn't get detected as a repo
908 alias = repo.repo_type
911 alias = repo.repo_type
909
912
910 config = make_db_config(clear_session=False)
913 config = make_db_config(clear_session=False)
911 config.set('extensions', 'largefiles', '')
914 config.set('extensions', 'largefiles', '')
912 bare = getattr(repo.scm_instance(config=config), 'bare', False)
915 bare = getattr(repo.scm_instance(config=config), 'bare', False)
913
916
914 # skip this for bare git repos
917 # skip this for bare git repos
915 if not bare:
918 if not bare:
916 # disable VCS repo
919 # disable VCS repo
917 vcs_path = os.path.join(rm_path, '.%s' % alias)
920 vcs_path = os.path.join(rm_path, '.%s' % alias)
918 if os.path.exists(vcs_path):
921 if os.path.exists(vcs_path):
919 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
922 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
920
923
921 _now = datetime.now()
924 _now = datetime.now()
922 _ms = str(_now.microsecond).rjust(6, '0')
925 _ms = str(_now.microsecond).rjust(6, '0')
923 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
926 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
924 repo.just_name)
927 repo.just_name)
925 if repo_group:
928 if repo_group:
926 # if repository is in group, prefix the removal path with the group
929 # if repository is in group, prefix the removal path with the group
927 args = repo_group.full_path_splitted + [_d]
930 args = repo_group.full_path_splitted + [_d]
928 _d = os.path.join(*args)
931 _d = os.path.join(*args)
929
932
930 if os.path.isdir(rm_path):
933 if os.path.isdir(rm_path):
931 shutil.move(rm_path, os.path.join(self.repos_path, _d))
934 shutil.move(rm_path, os.path.join(self.repos_path, _d))
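A minimal usage sketch of the delete() method shown above, not part of the changeset: it assumes this model lives at rhodecode.model.repo and that the caller commits the session afterwards (the model methods above only stage changes on self.sa). The repository name and the Session import path are assumed example values.

    from rhodecode.model.meta import Session
    from rhodecode.model.repo import RepoModel

    model = RepoModel()
    # detach any forks first, then delete the repo and archive it on disk
    model.delete('example-repo', forks='detach', fs_remove=True)
    Session().commit()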
@@ -1,249 +1,250 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Import early to make sure things are patched up properly
3 # Import early to make sure things are patched up properly
4 from setuptools import setup, find_packages
4 from setuptools import setup, find_packages
5
5
6 import os
6 import os
7 import sys
7 import sys
8 import platform
8 import platform
9
9
10 if sys.version_info < (2, 7):
10 if sys.version_info < (2, 7):
11 raise Exception('RhodeCode requires Python 2.7 or later')
11 raise Exception('RhodeCode requires Python 2.7 or later')
12
12
13
13
14 here = os.path.abspath(os.path.dirname(__file__))
14 here = os.path.abspath(os.path.dirname(__file__))
15
15
16
16
17 def _get_meta_var(name, data, callback_handler=None):
17 def _get_meta_var(name, data, callback_handler=None):
18 import re
18 import re
19 matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data)
19 matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data)
20 if matches:
20 if matches:
21 if not callable(callback_handler):
21 if not callable(callback_handler):
22 callback_handler = lambda v: v
22 callback_handler = lambda v: v
23
23
24 return callback_handler(eval(matches.groups()[0]))
24 return callback_handler(eval(matches.groups()[0]))
25
25
26 _meta = open(os.path.join(here, 'rhodecode', '__init__.py'), 'rb')
26 _meta = open(os.path.join(here, 'rhodecode', '__init__.py'), 'rb')
27 _metadata = _meta.read()
27 _metadata = _meta.read()
28 _meta.close()
28 _meta.close()
29
29
30 callback = lambda V: ('.'.join(map(str, V[:3])) + '.'.join(V[3:]))
30 callback = lambda V: ('.'.join(map(str, V[:3])) + '.'.join(V[3:]))
31 __version__ = open(os.path.join('rhodecode', 'VERSION')).read().strip()
31 __version__ = open(os.path.join('rhodecode', 'VERSION')).read().strip()
32 __license__ = _get_meta_var('__license__', _metadata)
32 __license__ = _get_meta_var('__license__', _metadata)
33 __author__ = _get_meta_var('__author__', _metadata)
33 __author__ = _get_meta_var('__author__', _metadata)
34 __url__ = _get_meta_var('__url__', _metadata)
34 __url__ = _get_meta_var('__url__', _metadata)
35 # defines current platform
35 # defines current platform
36 __platform__ = platform.system()
36 __platform__ = platform.system()
37
37
38 # Cygwin has different platform identifiers, but they all contain the
38 # Cygwin has different platform identifiers, but they all contain the
39 # term "CYGWIN"
39 # term "CYGWIN"
40 is_windows = __platform__ == 'Windows' or 'CYGWIN' in __platform__
40 is_windows = __platform__ == 'Windows' or 'CYGWIN' in __platform__
41
41
42 requirements = [
42 requirements = [
43 'Babel',
43 'Babel',
44 'Beaker',
44 'Beaker',
45 'FormEncode',
45 'FormEncode',
46 'Mako',
46 'Mako',
47 'Markdown',
47 'Markdown',
48 'MarkupSafe',
48 'MarkupSafe',
49 'MySQL-python',
49 'MySQL-python',
50 'Paste',
50 'Paste',
51 'PasteDeploy',
51 'PasteDeploy',
52 'PasteScript',
52 'PasteScript',
53 'Pygments',
53 'Pygments',
54 'Pylons',
54 'Pylons',
55 'Pyro4',
55 'Pyro4',
56 'Routes',
56 'Routes',
57 'SQLAlchemy',
57 'SQLAlchemy',
58 'Tempita',
58 'Tempita',
59 'URLObject',
59 'URLObject',
60 'WebError',
60 'WebError',
61 'WebHelpers',
61 'WebHelpers',
62 'WebHelpers2',
62 'WebHelpers2',
63 'WebOb',
63 'WebOb',
64 'WebTest',
64 'WebTest',
65 'Whoosh',
65 'Whoosh',
66 'alembic',
66 'alembic',
67 'amqplib',
67 'amqplib',
68 'anyjson',
68 'anyjson',
69 'appenlight-client',
69 'appenlight-client',
70 'authomatic',
70 'authomatic',
71 'backport_ipaddress',
71 'backport_ipaddress',
72 'celery',
72 'celery',
73 'colander',
73 'colander',
74 'decorator',
74 'decorator',
75 'docutils',
75 'docutils',
76 'gunicorn',
76 'gunicorn',
77 'infrae.cache',
77 'infrae.cache',
78 'ipython',
78 'ipython',
79 'iso8601',
79 'iso8601',
80 'kombu',
80 'kombu',
81 'marshmallow',
81 'msgpack-python',
82 'msgpack-python',
82 'packaging',
83 'packaging',
83 'psycopg2',
84 'psycopg2',
84 'py-gfm',
85 'py-gfm',
85 'pycrypto',
86 'pycrypto',
86 'pycurl',
87 'pycurl',
87 'pyparsing',
88 'pyparsing',
88 'pyramid',
89 'pyramid',
89 'pyramid-debugtoolbar',
90 'pyramid-debugtoolbar',
90 'pyramid-mako',
91 'pyramid-mako',
91 'pyramid-beaker',
92 'pyramid-beaker',
92 'pysqlite',
93 'pysqlite',
93 'python-dateutil',
94 'python-dateutil',
94 'python-ldap',
95 'python-ldap',
95 'python-memcached',
96 'python-memcached',
96 'python-pam',
97 'python-pam',
97 'recaptcha-client',
98 'recaptcha-client',
98 'repoze.lru',
99 'repoze.lru',
99 'requests',
100 'requests',
100 'simplejson',
101 'simplejson',
101 'waitress',
102 'waitress',
102 'zope.cachedescriptors',
103 'zope.cachedescriptors',
103 'dogpile.cache',
104 'dogpile.cache',
104 'dogpile.core'
105 'dogpile.core'
105 ]
106 ]
106
107
107 if is_windows:
108 if is_windows:
108 pass
109 pass
109 else:
110 else:
110 requirements.append('psutil')
111 requirements.append('psutil')
111 requirements.append('py-bcrypt')
112 requirements.append('py-bcrypt')
112
113
113 test_requirements = [
114 test_requirements = [
114 'WebTest',
115 'WebTest',
115 'configobj',
116 'configobj',
116 'cssselect',
117 'cssselect',
117 'flake8',
118 'flake8',
118 'lxml',
119 'lxml',
119 'mock',
120 'mock',
120 'pytest',
121 'pytest',
121 'pytest-cov',
122 'pytest-cov',
122 'pytest-runner',
123 'pytest-runner',
123 ]
124 ]
124
125
125 setup_requirements = [
126 setup_requirements = [
126 'PasteScript',
127 'PasteScript',
127 'pytest-runner',
128 'pytest-runner',
128 ]
129 ]
129
130
130 dependency_links = [
131 dependency_links = [
131 ]
132 ]
132
133
133 classifiers = [
134 classifiers = [
134 'Development Status :: 6 - Mature',
135 'Development Status :: 6 - Mature',
135 'Environment :: Web Environment',
136 'Environment :: Web Environment',
136 'Framework :: Pylons',
137 'Framework :: Pylons',
137 'Intended Audience :: Developers',
138 'Intended Audience :: Developers',
138 'Operating System :: OS Independent',
139 'Operating System :: OS Independent',
139 'Programming Language :: Python',
140 'Programming Language :: Python',
140 'Programming Language :: Python :: 2.7',
141 'Programming Language :: Python :: 2.7',
141 ]
142 ]
142
143
143
144
144 # additional files from the project that go somewhere in the filesystem
145 # additional files from the project that go somewhere in the filesystem
145 # relative to sys.prefix
146 # relative to sys.prefix
146 data_files = []
147 data_files = []
147
148
148 # additional files that go into the package itself
149 # additional files that go into the package itself
149 package_data = {'rhodecode': ['i18n/*/LC_MESSAGES/*.mo', ], }
150 package_data = {'rhodecode': ['i18n/*/LC_MESSAGES/*.mo', ], }
150
151
151 description = ('RhodeCode is a fast and powerful management tool '
152 description = ('RhodeCode is a fast and powerful management tool '
152 'for Mercurial and GIT with a built in push/pull server, '
153 'for Mercurial and GIT with a built in push/pull server, '
153 'full text search and code-review.')
154 'full text search and code-review.')
154
155
155 keywords = ' '.join([
156 keywords = ' '.join([
156 'rhodecode', 'rhodiumcode', 'mercurial', 'git', 'code review',
157 'rhodecode', 'rhodiumcode', 'mercurial', 'git', 'code review',
157 'repo groups', 'ldap', 'repository management', 'hgweb replacement',
158 'repo groups', 'ldap', 'repository management', 'hgweb replacement',
158 'hgwebdir', 'gitweb replacement', 'serving hgweb',
159 'hgwebdir', 'gitweb replacement', 'serving hgweb',
159 ])
160 ])
160
161
161 # long description
162 # long description
162 README_FILE = 'README.rst'
163 README_FILE = 'README.rst'
163 CHANGELOG_FILE = 'CHANGES.rst'
164 CHANGELOG_FILE = 'CHANGES.rst'
164 try:
165 try:
165 long_description = open(README_FILE).read() + '\n\n' + \
166 long_description = open(README_FILE).read() + '\n\n' + \
166 open(CHANGELOG_FILE).read()
167 open(CHANGELOG_FILE).read()
167
168
168 except IOError, err:
169 except IOError, err:
169 sys.stderr.write(
170 sys.stderr.write(
170 '[WARNING] Cannot find file specified as long_description (%s)\n or '
171 '[WARNING] Cannot find file specified as long_description (%s)\n or '
171 'changelog (%s) skipping that file' % (README_FILE, CHANGELOG_FILE)
172 'changelog (%s) skipping that file' % (README_FILE, CHANGELOG_FILE)
172 )
173 )
173 long_description = description
174 long_description = description
174
175
175 # packages
176 # packages
176 packages = find_packages()
177 packages = find_packages()
177
178
178 paster_commands = [
179 paster_commands = [
179 'make-config=rhodecode.lib.paster_commands.make_config:Command',
180 'make-config=rhodecode.lib.paster_commands.make_config:Command',
180 'setup-rhodecode=rhodecode.lib.paster_commands.setup_rhodecode:Command',
181 'setup-rhodecode=rhodecode.lib.paster_commands.setup_rhodecode:Command',
181 'update-repoinfo=rhodecode.lib.paster_commands.update_repoinfo:Command',
182 'update-repoinfo=rhodecode.lib.paster_commands.update_repoinfo:Command',
182 'cache-keys=rhodecode.lib.paster_commands.cache_keys:Command',
183 'cache-keys=rhodecode.lib.paster_commands.cache_keys:Command',
183 'ishell=rhodecode.lib.paster_commands.ishell:Command',
184 'ishell=rhodecode.lib.paster_commands.ishell:Command',
184 'upgrade-db=rhodecode.lib.dbmigrate:UpgradeDb',
185 'upgrade-db=rhodecode.lib.dbmigrate:UpgradeDb',
185 'celeryd=rhodecode.lib.celerypylons.commands:CeleryDaemonCommand',
186 'celeryd=rhodecode.lib.celerypylons.commands:CeleryDaemonCommand',
186 ]
187 ]
187
188
188 setup(
189 setup(
189 name='rhodecode-enterprise-ce',
190 name='rhodecode-enterprise-ce',
190 version=__version__,
191 version=__version__,
191 description=description,
192 description=description,
192 long_description=long_description,
193 long_description=long_description,
193 keywords=keywords,
194 keywords=keywords,
194 license=__license__,
195 license=__license__,
195 author=__author__,
196 author=__author__,
196 author_email='marcin@rhodecode.com',
197 author_email='marcin@rhodecode.com',
197 dependency_links=dependency_links,
198 dependency_links=dependency_links,
198 url=__url__,
199 url=__url__,
199 install_requires=requirements,
200 install_requires=requirements,
200 tests_require=test_requirements,
201 tests_require=test_requirements,
201 classifiers=classifiers,
202 classifiers=classifiers,
202 setup_requires=setup_requirements,
203 setup_requires=setup_requirements,
203 data_files=data_files,
204 data_files=data_files,
204 packages=packages,
205 packages=packages,
205 include_package_data=True,
206 include_package_data=True,
206 package_data=package_data,
207 package_data=package_data,
207 message_extractors={
208 message_extractors={
208 'rhodecode': [
209 'rhodecode': [
209 ('**.py', 'python', None),
210 ('**.py', 'python', None),
210 ('**.js', 'javascript', None),
211 ('**.js', 'javascript', None),
211 ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
212 ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
212 ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}),
213 ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}),
213 ('public/**', 'ignore', None),
214 ('public/**', 'ignore', None),
214 ]
215 ]
215 },
216 },
216 zip_safe=False,
217 zip_safe=False,
217 paster_plugins=['PasteScript', 'Pylons'],
218 paster_plugins=['PasteScript', 'Pylons'],
218 entry_points={
219 entry_points={
219 'enterprise.plugins1': [
220 'enterprise.plugins1': [
220 'crowd=rhodecode.authentication.plugins.auth_crowd:plugin_factory',
221 'crowd=rhodecode.authentication.plugins.auth_crowd:plugin_factory',
221 'headers=rhodecode.authentication.plugins.auth_headers:plugin_factory',
222 'headers=rhodecode.authentication.plugins.auth_headers:plugin_factory',
222 'jasig_cas=rhodecode.authentication.plugins.auth_jasig_cas:plugin_factory',
223 'jasig_cas=rhodecode.authentication.plugins.auth_jasig_cas:plugin_factory',
223 'ldap=rhodecode.authentication.plugins.auth_ldap:plugin_factory',
224 'ldap=rhodecode.authentication.plugins.auth_ldap:plugin_factory',
224 'pam=rhodecode.authentication.plugins.auth_pam:plugin_factory',
225 'pam=rhodecode.authentication.plugins.auth_pam:plugin_factory',
225 'rhodecode=rhodecode.authentication.plugins.auth_rhodecode:plugin_factory',
226 'rhodecode=rhodecode.authentication.plugins.auth_rhodecode:plugin_factory',
226 'token=rhodecode.authentication.plugins.auth_token:plugin_factory',
227 'token=rhodecode.authentication.plugins.auth_token:plugin_factory',
227 ],
228 ],
228 'paste.app_factory': [
229 'paste.app_factory': [
229 'main=rhodecode.config.middleware:make_pyramid_app',
230 'main=rhodecode.config.middleware:make_pyramid_app',
230 'pylons=rhodecode.config.middleware:make_app',
231 'pylons=rhodecode.config.middleware:make_app',
231 ],
232 ],
232 'paste.app_install': [
233 'paste.app_install': [
233 'main=pylons.util:PylonsInstaller',
234 'main=pylons.util:PylonsInstaller',
234 'pylons=pylons.util:PylonsInstaller',
235 'pylons=pylons.util:PylonsInstaller',
235 ],
236 ],
236 'paste.global_paster_command': paster_commands,
237 'paste.global_paster_command': paster_commands,
237 'pytest11': [
238 'pytest11': [
238 'pylons=rhodecode.tests.pylons_plugin',
239 'pylons=rhodecode.tests.pylons_plugin',
239 'enterprise=rhodecode.tests.plugin',
240 'enterprise=rhodecode.tests.plugin',
240 ],
241 ],
241 'console_scripts': [
242 'console_scripts': [
242 'rcserver=rhodecode.rcserver:main',
243 'rcserver=rhodecode.rcserver:main',
243 ],
244 ],
244 'beaker.backends': [
245 'beaker.backends': [
245 'memorylru_base=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerBase',
246 'memorylru_base=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerBase',
246 'memorylru_debug=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerDebug'
247 'memorylru_debug=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerDebug'
247 ]
248 ]
248 },
249 },
249 )
250 )