##// END OF EJS Templates
events: add serialization .to_dict() to events based on marshmallow
dan -
r379:a86e0931 default
parent child Browse files
Show More
@@ -0,0 +1,69 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 from datetime import datetime
20 from marshmallow import Schema, fields
21 from pyramid.threadlocal import get_current_request
22 from rhodecode.lib.utils2 import AttributeDict
23
24
# Fallback actor used when an event fires outside of a web request
# (e.g. from a background job or CLI), where no real user is available.
SYSTEM_USER = AttributeDict({'username': '__SYSTEM__'})
28
29
class UserSchema(Schema):
    """Marshmallow schema describing a user; only the username is dumped."""

    username = fields.Str()
35
36
class RhodecodeEventSchema(Schema):
    """
    Marshmallow schema shared by all RhodeCode events: when the event
    happened, who triggered it, and from which IP address.
    """

    utc_timestamp = fields.DateTime()
    acting_user = fields.Nested(UserSchema)
    acting_ip = fields.Str()
44
45
class RhodecodeEvent(object):
    """
    Common base class for all RhodeCode events.

    On construction it captures the current pyramid request (``None``
    when fired outside a request cycle) and a naive UTC timestamp.
    Subclasses may override ``MarshmallowSchema`` to extend the dumped
    representation; ``as_dict()`` performs the serialization.
    """
    MarshmallowSchema = RhodecodeEventSchema

    def __init__(self):
        self.request = get_current_request()
        self.utc_timestamp = datetime.utcnow()

    @property
    def acting_user(self):
        # No active request (background job / CLI) -> synthetic system user.
        if not self.request:
            return SYSTEM_USER
        return self.request.user.get_instance()

    @property
    def acting_ip(self):
        # NOTE(review): assumes request.user carries ip_addr — confirm
        # against the request/user factory that populates it.
        if not self.request:
            return '<no ip available>'
        return self.request.user.ip_addr

    def as_dict(self):
        """Serialize this event to a plain dict via its marshmallow schema."""
        schema = self.MarshmallowSchema()
        # marshmallow 2.x: dump() returns a MarshalResult; .data is the dict.
        return schema.dump(self).data
@@ -1,1641 +1,1654 b''
1 1 {
2 2 Babel = super.buildPythonPackage {
3 3 name = "Babel-1.3";
4 4 buildInputs = with self; [];
5 5 doCheck = false;
6 6 propagatedBuildInputs = with self; [pytz];
7 7 src = fetchurl {
8 8 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
9 9 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
10 10 };
11 11 meta = {
12 12 license = [ pkgs.lib.licenses.bsdOriginal ];
13 13 };
14 14 };
15 15 Beaker = super.buildPythonPackage {
16 16 name = "Beaker-1.7.0";
17 17 buildInputs = with self; [];
18 18 doCheck = false;
19 19 propagatedBuildInputs = with self; [];
20 20 src = fetchurl {
21 21 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
22 22 md5 = "386be3f7fe427358881eee4622b428b3";
23 23 };
24 24 meta = {
25 25 license = [ pkgs.lib.licenses.bsdOriginal ];
26 26 };
27 27 };
28 28 CProfileV = super.buildPythonPackage {
29 29 name = "CProfileV-1.0.6";
30 30 buildInputs = with self; [];
31 31 doCheck = false;
32 32 propagatedBuildInputs = with self; [bottle];
33 33 src = fetchurl {
34 34 url = "https://pypi.python.org/packages/eb/df/983a0b6cfd3ac94abf023f5011cb04f33613ace196e33f53c86cf91850d5/CProfileV-1.0.6.tar.gz";
35 35 md5 = "08c7c242b6e64237bc53c5d13537e03d";
36 36 };
37 37 meta = {
38 38 license = [ pkgs.lib.licenses.mit ];
39 39 };
40 40 };
41 41 Fabric = super.buildPythonPackage {
42 42 name = "Fabric-1.10.0";
43 43 buildInputs = with self; [];
44 44 doCheck = false;
45 45 propagatedBuildInputs = with self; [paramiko];
46 46 src = fetchurl {
47 47 url = "https://pypi.python.org/packages/e3/5f/b6ebdb5241d5ec9eab582a5c8a01255c1107da396f849e538801d2fe64a5/Fabric-1.10.0.tar.gz";
48 48 md5 = "2cb96473387f0e7aa035210892352f4a";
49 49 };
50 50 meta = {
51 51 license = [ pkgs.lib.licenses.bsdOriginal ];
52 52 };
53 53 };
54 54 FormEncode = super.buildPythonPackage {
55 55 name = "FormEncode-1.2.4";
56 56 buildInputs = with self; [];
57 57 doCheck = false;
58 58 propagatedBuildInputs = with self; [];
59 59 src = fetchurl {
60 60 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
61 61 md5 = "6bc17fb9aed8aea198975e888e2077f4";
62 62 };
63 63 meta = {
64 64 license = [ pkgs.lib.licenses.psfl ];
65 65 };
66 66 };
67 67 Jinja2 = super.buildPythonPackage {
68 68 name = "Jinja2-2.7.3";
69 69 buildInputs = with self; [];
70 70 doCheck = false;
71 71 propagatedBuildInputs = with self; [MarkupSafe];
72 72 src = fetchurl {
73 73 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
74 74 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
75 75 };
76 76 meta = {
77 77 license = [ pkgs.lib.licenses.bsdOriginal ];
78 78 };
79 79 };
80 80 Mako = super.buildPythonPackage {
81 81 name = "Mako-1.0.1";
82 82 buildInputs = with self; [];
83 83 doCheck = false;
84 84 propagatedBuildInputs = with self; [MarkupSafe];
85 85 src = fetchurl {
86 86 url = "https://pypi.python.org/packages/8e/a4/aa56533ecaa5f22ca92428f74e074d0c9337282933c722391902c8f9e0f8/Mako-1.0.1.tar.gz";
87 87 md5 = "9f0aafd177b039ef67b90ea350497a54";
88 88 };
89 89 meta = {
90 90 license = [ pkgs.lib.licenses.mit ];
91 91 };
92 92 };
93 93 Markdown = super.buildPythonPackage {
94 94 name = "Markdown-2.6.2";
95 95 buildInputs = with self; [];
96 96 doCheck = false;
97 97 propagatedBuildInputs = with self; [];
98 98 src = fetchurl {
99 99 url = "https://pypi.python.org/packages/62/8b/83658b5f6c220d5fcde9f9852d46ea54765d734cfbc5a9f4c05bfc36db4d/Markdown-2.6.2.tar.gz";
100 100 md5 = "256d19afcc564dc4ce4c229bb762f7ae";
101 101 };
102 102 meta = {
103 103 license = [ pkgs.lib.licenses.bsdOriginal ];
104 104 };
105 105 };
106 106 MarkupSafe = super.buildPythonPackage {
107 107 name = "MarkupSafe-0.23";
108 108 buildInputs = with self; [];
109 109 doCheck = false;
110 110 propagatedBuildInputs = with self; [];
111 111 src = fetchurl {
112 112 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
113 113 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
114 114 };
115 115 meta = {
116 116 license = [ pkgs.lib.licenses.bsdOriginal ];
117 117 };
118 118 };
119 119 MySQL-python = super.buildPythonPackage {
120 120 name = "MySQL-python-1.2.5";
121 121 buildInputs = with self; [];
122 122 doCheck = false;
123 123 propagatedBuildInputs = with self; [];
124 124 src = fetchurl {
125 125 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
126 126 md5 = "654f75b302db6ed8dc5a898c625e030c";
127 127 };
128 128 meta = {
129 129 license = [ pkgs.lib.licenses.gpl1 ];
130 130 };
131 131 };
132 132 Paste = super.buildPythonPackage {
133 133 name = "Paste-2.0.2";
134 134 buildInputs = with self; [];
135 135 doCheck = false;
136 136 propagatedBuildInputs = with self; [six];
137 137 src = fetchurl {
138 138 url = "https://pypi.python.org/packages/d5/8d/0f8ac40687b97ff3e07ebd1369be20bdb3f93864d2dc3c2ff542edb4ce50/Paste-2.0.2.tar.gz";
139 139 md5 = "4bfc8a7eaf858f6309d2ac0f40fc951c";
140 140 };
141 141 meta = {
142 142 license = [ pkgs.lib.licenses.mit ];
143 143 };
144 144 };
145 145 PasteDeploy = super.buildPythonPackage {
146 146 name = "PasteDeploy-1.5.2";
147 147 buildInputs = with self; [];
148 148 doCheck = false;
149 149 propagatedBuildInputs = with self; [];
150 150 src = fetchurl {
151 151 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
152 152 md5 = "352b7205c78c8de4987578d19431af3b";
153 153 };
154 154 meta = {
155 155 license = [ pkgs.lib.licenses.mit ];
156 156 };
157 157 };
158 158 PasteScript = super.buildPythonPackage {
159 159 name = "PasteScript-1.7.5";
160 160 buildInputs = with self; [];
161 161 doCheck = false;
162 162 propagatedBuildInputs = with self; [Paste PasteDeploy];
163 163 src = fetchurl {
164 164 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
165 165 md5 = "4c72d78dcb6bb993f30536842c16af4d";
166 166 };
167 167 meta = {
168 168 license = [ pkgs.lib.licenses.mit ];
169 169 };
170 170 };
171 171 Pygments = super.buildPythonPackage {
172 172 name = "Pygments-2.1.3";
173 173 buildInputs = with self; [];
174 174 doCheck = false;
175 175 propagatedBuildInputs = with self; [];
176 176 src = fetchurl {
177 177 url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
178 178 md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
179 179 };
180 180 meta = {
181 181 license = [ pkgs.lib.licenses.bsdOriginal ];
182 182 };
183 183 };
184 184 Pylons = super.buildPythonPackage {
185 185 name = "Pylons-1.0.1";
186 186 buildInputs = with self; [];
187 187 doCheck = false;
188 188 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
189 189 src = fetchurl {
190 190 url = "https://pypi.python.org/packages/a2/69/b835a6bad00acbfeed3f33c6e44fa3f936efc998c795bfb15c61a79ecf62/Pylons-1.0.1.tar.gz";
191 191 md5 = "6cb880d75fa81213192142b07a6e4915";
192 192 };
193 193 meta = {
194 194 license = [ pkgs.lib.licenses.bsdOriginal ];
195 195 };
196 196 };
197 197 Pyro4 = super.buildPythonPackage {
198 198 name = "Pyro4-4.41";
199 199 buildInputs = with self; [];
200 200 doCheck = false;
201 201 propagatedBuildInputs = with self; [serpent];
202 202 src = fetchurl {
203 203 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
204 204 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
205 205 };
206 206 meta = {
207 207 license = [ pkgs.lib.licenses.mit ];
208 208 };
209 209 };
210 210 Routes = super.buildPythonPackage {
211 211 name = "Routes-1.13";
212 212 buildInputs = with self; [];
213 213 doCheck = false;
214 214 propagatedBuildInputs = with self; [repoze.lru];
215 215 src = fetchurl {
216 216 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
217 217 md5 = "d527b0ab7dd9172b1275a41f97448783";
218 218 };
219 219 meta = {
220 220 license = [ pkgs.lib.licenses.bsdOriginal ];
221 221 };
222 222 };
223 223 SQLAlchemy = super.buildPythonPackage {
224 224 name = "SQLAlchemy-0.9.9";
225 225 buildInputs = with self; [];
226 226 doCheck = false;
227 227 propagatedBuildInputs = with self; [];
228 228 src = fetchurl {
229 229 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
230 230 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
231 231 };
232 232 meta = {
233 233 license = [ pkgs.lib.licenses.mit ];
234 234 };
235 235 };
236 236 Sphinx = super.buildPythonPackage {
237 237 name = "Sphinx-1.2.2";
238 238 buildInputs = with self; [];
239 239 doCheck = false;
240 240 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
241 241 src = fetchurl {
242 242 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
243 243 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
244 244 };
245 245 meta = {
246 246 license = [ pkgs.lib.licenses.bsdOriginal ];
247 247 };
248 248 };
249 249 Tempita = super.buildPythonPackage {
250 250 name = "Tempita-0.5.2";
251 251 buildInputs = with self; [];
252 252 doCheck = false;
253 253 propagatedBuildInputs = with self; [];
254 254 src = fetchurl {
255 255 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
256 256 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
257 257 };
258 258 meta = {
259 259 license = [ pkgs.lib.licenses.mit ];
260 260 };
261 261 };
262 262 URLObject = super.buildPythonPackage {
263 263 name = "URLObject-2.4.0";
264 264 buildInputs = with self; [];
265 265 doCheck = false;
266 266 propagatedBuildInputs = with self; [];
267 267 src = fetchurl {
268 268 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
269 269 md5 = "2ed819738a9f0a3051f31dc9924e3065";
270 270 };
271 271 meta = {
272 272 license = [ ];
273 273 };
274 274 };
275 275 WebError = super.buildPythonPackage {
276 276 name = "WebError-0.10.3";
277 277 buildInputs = with self; [];
278 278 doCheck = false;
279 279 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
280 280 src = fetchurl {
281 281 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
282 282 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
283 283 };
284 284 meta = {
285 285 license = [ pkgs.lib.licenses.mit ];
286 286 };
287 287 };
288 288 WebHelpers = super.buildPythonPackage {
289 289 name = "WebHelpers-1.3";
290 290 buildInputs = with self; [];
291 291 doCheck = false;
292 292 propagatedBuildInputs = with self; [MarkupSafe];
293 293 src = fetchurl {
294 294 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
295 295 md5 = "32749ffadfc40fea51075a7def32588b";
296 296 };
297 297 meta = {
298 298 license = [ pkgs.lib.licenses.bsdOriginal ];
299 299 };
300 300 };
301 301 WebHelpers2 = super.buildPythonPackage {
302 302 name = "WebHelpers2-2.0";
303 303 buildInputs = with self; [];
304 304 doCheck = false;
305 305 propagatedBuildInputs = with self; [MarkupSafe six];
306 306 src = fetchurl {
307 307 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
308 308 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
309 309 };
310 310 meta = {
311 311 license = [ pkgs.lib.licenses.mit ];
312 312 };
313 313 };
314 314 WebOb = super.buildPythonPackage {
315 315 name = "WebOb-1.3.1";
316 316 buildInputs = with self; [];
317 317 doCheck = false;
318 318 propagatedBuildInputs = with self; [];
319 319 src = fetchurl {
320 320 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
321 321 md5 = "20918251c5726956ba8fef22d1556177";
322 322 };
323 323 meta = {
324 324 license = [ pkgs.lib.licenses.mit ];
325 325 };
326 326 };
327 327 WebTest = super.buildPythonPackage {
328 328 name = "WebTest-1.4.3";
329 329 buildInputs = with self; [];
330 330 doCheck = false;
331 331 propagatedBuildInputs = with self; [WebOb];
332 332 src = fetchurl {
333 333 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
334 334 md5 = "631ce728bed92c681a4020a36adbc353";
335 335 };
336 336 meta = {
337 337 license = [ pkgs.lib.licenses.mit ];
338 338 };
339 339 };
340 340 Whoosh = super.buildPythonPackage {
341 341 name = "Whoosh-2.7.0";
342 342 buildInputs = with self; [];
343 343 doCheck = false;
344 344 propagatedBuildInputs = with self; [];
345 345 src = fetchurl {
346 346 url = "https://pypi.python.org/packages/1c/dc/2f0231ff3875ded36df8c1ab851451e51a237dc0e5a86d3d96036158da94/Whoosh-2.7.0.zip";
347 347 md5 = "7abfd970f16fadc7311960f3fa0bc7a9";
348 348 };
349 349 meta = {
350 350 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
351 351 };
352 352 };
353 353 alembic = super.buildPythonPackage {
354 354 name = "alembic-0.8.4";
355 355 buildInputs = with self; [];
356 356 doCheck = false;
357 357 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor];
358 358 src = fetchurl {
359 359 url = "https://pypi.python.org/packages/ca/7e/299b4499b5c75e5a38c5845145ad24755bebfb8eec07a2e1c366b7181eeb/alembic-0.8.4.tar.gz";
360 360 md5 = "5f95d8ee62b443f9b37eb5bee76c582d";
361 361 };
362 362 meta = {
363 363 license = [ pkgs.lib.licenses.mit ];
364 364 };
365 365 };
366 366 amqplib = super.buildPythonPackage {
367 367 name = "amqplib-1.0.2";
368 368 buildInputs = with self; [];
369 369 doCheck = false;
370 370 propagatedBuildInputs = with self; [];
371 371 src = fetchurl {
372 372 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
373 373 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
374 374 };
375 375 meta = {
376 376 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
377 377 };
378 378 };
379 379 anyjson = super.buildPythonPackage {
380 380 name = "anyjson-0.3.3";
381 381 buildInputs = with self; [];
382 382 doCheck = false;
383 383 propagatedBuildInputs = with self; [];
384 384 src = fetchurl {
385 385 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
386 386 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
387 387 };
388 388 meta = {
389 389 license = [ pkgs.lib.licenses.bsdOriginal ];
390 390 };
391 391 };
392 392 appenlight-client = super.buildPythonPackage {
393 393 name = "appenlight-client-0.6.14";
394 394 buildInputs = with self; [];
395 395 doCheck = false;
396 396 propagatedBuildInputs = with self; [WebOb requests];
397 397 src = fetchurl {
398 398 url = "https://pypi.python.org/packages/4d/e0/23fee3ebada8143f707e65c06bcb82992040ee64ea8355e044ed55ebf0c1/appenlight_client-0.6.14.tar.gz";
399 399 md5 = "578c69b09f4356d898fff1199b98a95c";
400 400 };
401 401 meta = {
402 402 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
403 403 };
404 404 };
405 405 authomatic = super.buildPythonPackage {
406 406 name = "authomatic-0.1.0.post1";
407 407 buildInputs = with self; [];
408 408 doCheck = false;
409 409 propagatedBuildInputs = with self; [];
410 410 src = fetchurl {
411 411 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
412 412 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
413 413 };
414 414 meta = {
415 415 license = [ pkgs.lib.licenses.mit ];
416 416 };
417 417 };
418 418 backport-ipaddress = super.buildPythonPackage {
419 419 name = "backport-ipaddress-0.1";
420 420 buildInputs = with self; [];
421 421 doCheck = false;
422 422 propagatedBuildInputs = with self; [];
423 423 src = fetchurl {
424 424 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
425 425 md5 = "9c1f45f4361f71b124d7293a60006c05";
426 426 };
427 427 meta = {
428 428 license = [ pkgs.lib.licenses.psfl ];
429 429 };
430 430 };
431 431 bottle = super.buildPythonPackage {
432 432 name = "bottle-0.12.8";
433 433 buildInputs = with self; [];
434 434 doCheck = false;
435 435 propagatedBuildInputs = with self; [];
436 436 src = fetchurl {
437 437 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
438 438 md5 = "13132c0a8f607bf860810a6ee9064c5b";
439 439 };
440 440 meta = {
441 441 license = [ pkgs.lib.licenses.mit ];
442 442 };
443 443 };
444 444 bumpversion = super.buildPythonPackage {
445 445 name = "bumpversion-0.5.3";
446 446 buildInputs = with self; [];
447 447 doCheck = false;
448 448 propagatedBuildInputs = with self; [];
449 449 src = fetchurl {
450 450 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
451 451 md5 = "c66a3492eafcf5ad4b024be9fca29820";
452 452 };
453 453 meta = {
454 454 license = [ pkgs.lib.licenses.mit ];
455 455 };
456 456 };
457 457 celery = super.buildPythonPackage {
458 458 name = "celery-2.2.10";
459 459 buildInputs = with self; [];
460 460 doCheck = false;
461 461 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
462 462 src = fetchurl {
463 463 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
464 464 md5 = "898bc87e54f278055b561316ba73e222";
465 465 };
466 466 meta = {
467 467 license = [ pkgs.lib.licenses.bsdOriginal ];
468 468 };
469 469 };
470 470 click = super.buildPythonPackage {
471 471 name = "click-5.1";
472 472 buildInputs = with self; [];
473 473 doCheck = false;
474 474 propagatedBuildInputs = with self; [];
475 475 src = fetchurl {
476 476 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
477 477 md5 = "9c5323008cccfe232a8b161fc8196d41";
478 478 };
479 479 meta = {
480 480 license = [ pkgs.lib.licenses.bsdOriginal ];
481 481 };
482 482 };
483 483 colander = super.buildPythonPackage {
484 484 name = "colander-1.2";
485 485 buildInputs = with self; [];
486 486 doCheck = false;
487 487 propagatedBuildInputs = with self; [translationstring iso8601];
488 488 src = fetchurl {
489 489 url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz";
490 490 md5 = "83db21b07936a0726e588dae1914b9ed";
491 491 };
492 492 meta = {
493 493 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
494 494 };
495 495 };
496 496 configobj = super.buildPythonPackage {
497 497 name = "configobj-5.0.6";
498 498 buildInputs = with self; [];
499 499 doCheck = false;
500 500 propagatedBuildInputs = with self; [six];
501 501 src = fetchurl {
502 502 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
503 503 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
504 504 };
505 505 meta = {
506 506 license = [ pkgs.lib.licenses.bsdOriginal ];
507 507 };
508 508 };
509 509 cov-core = super.buildPythonPackage {
510 510 name = "cov-core-1.15.0";
511 511 buildInputs = with self; [];
512 512 doCheck = false;
513 513 propagatedBuildInputs = with self; [coverage];
514 514 src = fetchurl {
515 515 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
516 516 md5 = "f519d4cb4c4e52856afb14af52919fe6";
517 517 };
518 518 meta = {
519 519 license = [ pkgs.lib.licenses.mit ];
520 520 };
521 521 };
522 522 coverage = super.buildPythonPackage {
523 523 name = "coverage-3.7.1";
524 524 buildInputs = with self; [];
525 525 doCheck = false;
526 526 propagatedBuildInputs = with self; [];
527 527 src = fetchurl {
528 528 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
529 529 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
530 530 };
531 531 meta = {
532 532 license = [ pkgs.lib.licenses.bsdOriginal ];
533 533 };
534 534 };
535 535 cssselect = super.buildPythonPackage {
536 536 name = "cssselect-0.9.1";
537 537 buildInputs = with self; [];
538 538 doCheck = false;
539 539 propagatedBuildInputs = with self; [];
540 540 src = fetchurl {
541 541 url = "https://pypi.python.org/packages/aa/e5/9ee1460d485b94a6d55732eb7ad5b6c084caf73dd6f9cb0bb7d2a78fafe8/cssselect-0.9.1.tar.gz";
542 542 md5 = "c74f45966277dc7a0f768b9b0f3522ac";
543 543 };
544 544 meta = {
545 545 license = [ pkgs.lib.licenses.bsdOriginal ];
546 546 };
547 547 };
548 548 decorator = super.buildPythonPackage {
549 549 name = "decorator-3.4.2";
550 550 buildInputs = with self; [];
551 551 doCheck = false;
552 552 propagatedBuildInputs = with self; [];
553 553 src = fetchurl {
554 554 url = "https://pypi.python.org/packages/35/3a/42566eb7a2cbac774399871af04e11d7ae3fc2579e7dae85213b8d1d1c57/decorator-3.4.2.tar.gz";
555 555 md5 = "9e0536870d2b83ae27d58dbf22582f4d";
556 556 };
557 557 meta = {
558 558 license = [ pkgs.lib.licenses.bsdOriginal ];
559 559 };
560 560 };
561 561 docutils = super.buildPythonPackage {
562 562 name = "docutils-0.12";
563 563 buildInputs = with self; [];
564 564 doCheck = false;
565 565 propagatedBuildInputs = with self; [];
566 566 src = fetchurl {
567 567 url = "https://pypi.python.org/packages/37/38/ceda70135b9144d84884ae2fc5886c6baac4edea39550f28bcd144c1234d/docutils-0.12.tar.gz";
568 568 md5 = "4622263b62c5c771c03502afa3157768";
569 569 };
570 570 meta = {
571 571 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
572 572 };
573 573 };
574 574 dogpile.cache = super.buildPythonPackage {
575 575 name = "dogpile.cache-0.6.1";
576 576 buildInputs = with self; [];
577 577 doCheck = false;
578 578 propagatedBuildInputs = with self; [dogpile.core];
579 579 src = fetchurl {
580 580 url = "https://pypi.python.org/packages/f6/a0/6f2142c58c6588d17c734265b103ae1cd0741e1681dd9483a63f22033375/dogpile.cache-0.6.1.tar.gz";
581 581 md5 = "35d7fb30f22bbd0685763d894dd079a9";
582 582 };
583 583 meta = {
584 584 license = [ pkgs.lib.licenses.bsdOriginal ];
585 585 };
586 586 };
587 587 dogpile.core = super.buildPythonPackage {
588 588 name = "dogpile.core-0.4.1";
589 589 buildInputs = with self; [];
590 590 doCheck = false;
591 591 propagatedBuildInputs = with self; [];
592 592 src = fetchurl {
593 593 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
594 594 md5 = "01cb19f52bba3e95c9b560f39341f045";
595 595 };
596 596 meta = {
597 597 license = [ pkgs.lib.licenses.bsdOriginal ];
598 598 };
599 599 };
600 600 dulwich = super.buildPythonPackage {
601 601 name = "dulwich-0.12.0";
602 602 buildInputs = with self; [];
603 603 doCheck = false;
604 604 propagatedBuildInputs = with self; [];
605 605 src = fetchurl {
606 606 url = "https://pypi.python.org/packages/6f/04/fbe561b6d45c0ec758330d5b7f5ba4b6cb4f1ca1ab49859d2fc16320da75/dulwich-0.12.0.tar.gz";
607 607 md5 = "f3a8a12bd9f9dd8c233e18f3d49436fa";
608 608 };
609 609 meta = {
610 610 license = [ pkgs.lib.licenses.gpl2Plus ];
611 611 };
612 612 };
613 613 ecdsa = super.buildPythonPackage {
614 614 name = "ecdsa-0.11";
615 615 buildInputs = with self; [];
616 616 doCheck = false;
617 617 propagatedBuildInputs = with self; [];
618 618 src = fetchurl {
619 619 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
620 620 md5 = "8ef586fe4dbb156697d756900cb41d7c";
621 621 };
622 622 meta = {
623 623 license = [ pkgs.lib.licenses.mit ];
624 624 };
625 625 };
626 626 elasticsearch = super.buildPythonPackage {
627 627 name = "elasticsearch-2.3.0";
628 628 buildInputs = with self; [];
629 629 doCheck = false;
630 630 propagatedBuildInputs = with self; [urllib3];
631 631 src = fetchurl {
632 632 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
633 633 md5 = "2550f3b51629cf1ef9636608af92c340";
634 634 };
635 635 meta = {
636 636 license = [ pkgs.lib.licenses.asl20 ];
637 637 };
638 638 };
639 639 elasticsearch-dsl = super.buildPythonPackage {
640 640 name = "elasticsearch-dsl-2.0.0";
641 641 buildInputs = with self; [];
642 642 doCheck = false;
643 643 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
644 644 src = fetchurl {
645 645 url = "https://pypi.python.org/packages/4e/5d/e788ae8dbe2ff4d13426db0a027533386a5c276c77a2654dc0e2007ce04a/elasticsearch-dsl-2.0.0.tar.gz";
646 646 md5 = "4cdfec81bb35383dd3b7d02d7dc5ee68";
647 647 };
648 648 meta = {
649 649 license = [ pkgs.lib.licenses.asl20 ];
650 650 };
651 651 };
652 652 flake8 = super.buildPythonPackage {
653 653 name = "flake8-2.4.1";
654 654 buildInputs = with self; [];
655 655 doCheck = false;
656 656 propagatedBuildInputs = with self; [pyflakes pep8 mccabe];
657 657 src = fetchurl {
658 658 url = "https://pypi.python.org/packages/8f/b5/9a73c66c7dba273bac8758398f060c008a25f3e84531063b42503b5d0a95/flake8-2.4.1.tar.gz";
659 659 md5 = "ed45d3db81a3b7c88bd63c6e37ca1d65";
660 660 };
661 661 meta = {
662 662 license = [ pkgs.lib.licenses.mit ];
663 663 };
664 664 };
665 665 future = super.buildPythonPackage {
666 666 name = "future-0.14.3";
667 667 buildInputs = with self; [];
668 668 doCheck = false;
669 669 propagatedBuildInputs = with self; [];
670 670 src = fetchurl {
671 671 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
672 672 md5 = "e94079b0bd1fc054929e8769fc0f6083";
673 673 };
674 674 meta = {
675 675 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
676 676 };
677 677 };
678 678 futures = super.buildPythonPackage {
679 679 name = "futures-3.0.2";
680 680 buildInputs = with self; [];
681 681 doCheck = false;
682 682 propagatedBuildInputs = with self; [];
683 683 src = fetchurl {
684 684 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
685 685 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
686 686 };
687 687 meta = {
688 688 license = [ pkgs.lib.licenses.bsdOriginal ];
689 689 };
690 690 };
691 691 gnureadline = super.buildPythonPackage {
692 692 name = "gnureadline-6.3.3";
693 693 buildInputs = with self; [];
694 694 doCheck = false;
695 695 propagatedBuildInputs = with self; [];
696 696 src = fetchurl {
697 697 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
698 698 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
699 699 };
700 700 meta = {
701 701 license = [ pkgs.lib.licenses.gpl1 ];
702 702 };
703 703 };
704 704 gprof2dot = super.buildPythonPackage {
705 705 name = "gprof2dot-2015.12.1";
706 706 buildInputs = with self; [];
707 707 doCheck = false;
708 708 propagatedBuildInputs = with self; [];
709 709 src = fetchurl {
710 710 url = "https://pypi.python.org/packages/b9/34/7bf93c1952d40fa5c95ad963f4d8344b61ef58558632402eca18e6c14127/gprof2dot-2015.12.1.tar.gz";
711 711 md5 = "e23bf4e2f94db032750c193384b4165b";
712 712 };
713 713 meta = {
714 714 license = [ { fullName = "LGPL"; } ];
715 715 };
716 716 };
717 717 gunicorn = super.buildPythonPackage {
718 718 name = "gunicorn-19.6.0";
719 719 buildInputs = with self; [];
720 720 doCheck = false;
721 721 propagatedBuildInputs = with self; [];
722 722 src = fetchurl {
723 723 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
724 724 md5 = "338e5e8a83ea0f0625f768dba4597530";
725 725 };
726 726 meta = {
727 727 license = [ pkgs.lib.licenses.mit ];
728 728 };
729 729 };
730 730 infrae.cache = super.buildPythonPackage {
731 731 name = "infrae.cache-1.0.1";
732 732 buildInputs = with self; [];
733 733 doCheck = false;
734 734 propagatedBuildInputs = with self; [Beaker repoze.lru];
735 735 src = fetchurl {
736 736 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
737 737 md5 = "b09076a766747e6ed2a755cc62088e32";
738 738 };
739 739 meta = {
740 740 license = [ pkgs.lib.licenses.zpt21 ];
741 741 };
742 742 };
743 743 invoke = super.buildPythonPackage {
744 744 name = "invoke-0.13.0";
745 745 buildInputs = with self; [];
746 746 doCheck = false;
747 747 propagatedBuildInputs = with self; [];
748 748 src = fetchurl {
749 749 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
750 750 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
751 751 };
752 752 meta = {
753 753 license = [ pkgs.lib.licenses.bsdOriginal ];
754 754 };
755 755 };
756 756 ipdb = super.buildPythonPackage {
757 757 name = "ipdb-0.8";
758 758 buildInputs = with self; [];
759 759 doCheck = false;
760 760 propagatedBuildInputs = with self; [ipython];
761 761 src = fetchurl {
762 762 url = "https://pypi.python.org/packages/f0/25/d7dd430ced6cd8dc242a933c8682b5dbf32eb4011d82f87e34209e5ec845/ipdb-0.8.zip";
763 763 md5 = "96dca0712efa01aa5eaf6b22071dd3ed";
764 764 };
765 765 meta = {
766 766 license = [ pkgs.lib.licenses.gpl1 ];
767 767 };
768 768 };
769 769 ipython = super.buildPythonPackage {
770 770 name = "ipython-3.1.0";
771 771 buildInputs = with self; [];
772 772 doCheck = false;
773 773 propagatedBuildInputs = with self; [];
774 774 src = fetchurl {
775 775 url = "https://pypi.python.org/packages/06/91/120c0835254c120af89f066afaabf81289bc2726c1fc3ca0555df6882f58/ipython-3.1.0.tar.gz";
776 776 md5 = "a749d90c16068687b0ec45a27e72ef8f";
777 777 };
778 778 meta = {
779 779 license = [ pkgs.lib.licenses.bsdOriginal ];
780 780 };
781 781 };
782 782 iso8601 = super.buildPythonPackage {
783 783 name = "iso8601-0.1.11";
784 784 buildInputs = with self; [];
785 785 doCheck = false;
786 786 propagatedBuildInputs = with self; [];
787 787 src = fetchurl {
788 788 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
789 789 md5 = "b06d11cd14a64096f907086044f0fe38";
790 790 };
791 791 meta = {
792 792 license = [ pkgs.lib.licenses.mit ];
793 793 };
794 794 };
795 795 itsdangerous = super.buildPythonPackage {
796 796 name = "itsdangerous-0.24";
797 797 buildInputs = with self; [];
798 798 doCheck = false;
799 799 propagatedBuildInputs = with self; [];
800 800 src = fetchurl {
801 801 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
802 802 md5 = "a3d55aa79369aef5345c036a8a26307f";
803 803 };
804 804 meta = {
805 805 license = [ pkgs.lib.licenses.bsdOriginal ];
806 806 };
807 807 };
808 808 kombu = super.buildPythonPackage {
809 809 name = "kombu-1.5.1";
810 810 buildInputs = with self; [];
811 811 doCheck = false;
812 812 propagatedBuildInputs = with self; [anyjson amqplib];
813 813 src = fetchurl {
814 814 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
815 815 md5 = "50662f3c7e9395b3d0721fb75d100b63";
816 816 };
817 817 meta = {
818 818 license = [ pkgs.lib.licenses.bsdOriginal ];
819 819 };
820 820 };
821 821 lxml = super.buildPythonPackage {
822 822 name = "lxml-3.4.4";
823 823 buildInputs = with self; [];
824 824 doCheck = false;
825 825 propagatedBuildInputs = with self; [];
826 826 src = fetchurl {
827 827 url = "https://pypi.python.org/packages/63/c7/4f2a2a4ad6c6fa99b14be6b3c1cece9142e2d915aa7c43c908677afc8fa4/lxml-3.4.4.tar.gz";
828 828 md5 = "a9a65972afc173ec7a39c585f4eea69c";
829 829 };
830 830 meta = {
831 831 license = [ pkgs.lib.licenses.bsdOriginal ];
832 832 };
833 833 };
834 marshmallow = super.buildPythonPackage {
835 name = "marshmallow-2.8.0";
836 buildInputs = with self; [];
837 doCheck = false;
838 propagatedBuildInputs = with self; [];
839 src = fetchurl {
840 url = "https://pypi.python.org/packages/4f/64/9393d77847d86981c84b88bbea627d30ff71b5ab1402636b366f73737817/marshmallow-2.8.0.tar.gz";
841 md5 = "204513fc123a3d9bdd7b63b9747f02e6";
842 };
843 meta = {
844 license = [ pkgs.lib.licenses.mit ];
845 };
846 };
834 847 mccabe = super.buildPythonPackage {
835 848 name = "mccabe-0.3";
836 849 buildInputs = with self; [];
837 850 doCheck = false;
838 851 propagatedBuildInputs = with self; [];
839 852 src = fetchurl {
840 853 url = "https://pypi.python.org/packages/c9/2e/75231479e11a906b64ac43bad9d0bb534d00080b18bdca8db9da46e1faf7/mccabe-0.3.tar.gz";
841 854 md5 = "81640948ff226f8c12b3277059489157";
842 855 };
843 856 meta = {
844 857 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
845 858 };
846 859 };
847 860 meld3 = super.buildPythonPackage {
848 861 name = "meld3-1.0.2";
849 862 buildInputs = with self; [];
850 863 doCheck = false;
851 864 propagatedBuildInputs = with self; [];
852 865 src = fetchurl {
853 866 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
854 867 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
855 868 };
856 869 meta = {
857 870 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
858 871 };
859 872 };
860 873 mock = super.buildPythonPackage {
861 874 name = "mock-1.0.1";
862 875 buildInputs = with self; [];
863 876 doCheck = false;
864 877 propagatedBuildInputs = with self; [];
865 878 src = fetchurl {
866 879 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
867 880 md5 = "869f08d003c289a97c1a6610faf5e913";
868 881 };
869 882 meta = {
870 883 license = [ pkgs.lib.licenses.bsdOriginal ];
871 884 };
872 885 };
873 886 msgpack-python = super.buildPythonPackage {
874 887 name = "msgpack-python-0.4.6";
875 888 buildInputs = with self; [];
876 889 doCheck = false;
877 890 propagatedBuildInputs = with self; [];
878 891 src = fetchurl {
879 892 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
880 893 md5 = "8b317669314cf1bc881716cccdaccb30";
881 894 };
882 895 meta = {
883 896 license = [ pkgs.lib.licenses.asl20 ];
884 897 };
885 898 };
886 899 nose = super.buildPythonPackage {
887 900 name = "nose-1.3.6";
888 901 buildInputs = with self; [];
889 902 doCheck = false;
890 903 propagatedBuildInputs = with self; [];
891 904 src = fetchurl {
892 905 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
893 906 md5 = "0ca546d81ca8309080fc80cb389e7a16";
894 907 };
895 908 meta = {
896 909 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
897 910 };
898 911 };
899 912 objgraph = super.buildPythonPackage {
900 913 name = "objgraph-2.0.0";
901 914 buildInputs = with self; [];
902 915 doCheck = false;
903 916 propagatedBuildInputs = with self; [];
904 917 src = fetchurl {
905 918 url = "https://pypi.python.org/packages/d7/33/ace750b59247496ed769b170586c5def7202683f3d98e737b75b767ff29e/objgraph-2.0.0.tar.gz";
906 919 md5 = "25b0d5e5adc74aa63ead15699614159c";
907 920 };
908 921 meta = {
909 922 license = [ pkgs.lib.licenses.mit ];
910 923 };
911 924 };
912 925 packaging = super.buildPythonPackage {
913 926 name = "packaging-15.2";
914 927 buildInputs = with self; [];
915 928 doCheck = false;
916 929 propagatedBuildInputs = with self; [];
917 930 src = fetchurl {
918 931 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
919 932 md5 = "c16093476f6ced42128bf610e5db3784";
920 933 };
921 934 meta = {
922 935 license = [ pkgs.lib.licenses.asl20 ];
923 936 };
924 937 };
925 938 paramiko = super.buildPythonPackage {
926 939 name = "paramiko-1.15.1";
927 940 buildInputs = with self; [];
928 941 doCheck = false;
929 942 propagatedBuildInputs = with self; [pycrypto ecdsa];
930 943 src = fetchurl {
931 944 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
932 945 md5 = "48c274c3f9b1282932567b21f6acf3b5";
933 946 };
934 947 meta = {
935 948 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
936 949 };
937 950 };
938 951 pep8 = super.buildPythonPackage {
939 952 name = "pep8-1.5.7";
940 953 buildInputs = with self; [];
941 954 doCheck = false;
942 955 propagatedBuildInputs = with self; [];
943 956 src = fetchurl {
944 957 url = "https://pypi.python.org/packages/8b/de/259f5e735897ada1683489dd514b2a1c91aaa74e5e6b68f80acf128a6368/pep8-1.5.7.tar.gz";
945 958 md5 = "f6adbdd69365ecca20513c709f9b7c93";
946 959 };
947 960 meta = {
948 961 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
949 962 };
950 963 };
951 964 psutil = super.buildPythonPackage {
952 965 name = "psutil-2.2.1";
953 966 buildInputs = with self; [];
954 967 doCheck = false;
955 968 propagatedBuildInputs = with self; [];
956 969 src = fetchurl {
957 970 url = "https://pypi.python.org/packages/df/47/ee54ef14dd40f8ce831a7581001a5096494dc99fe71586260ca6b531fe86/psutil-2.2.1.tar.gz";
958 971 md5 = "1a2b58cd9e3a53528bb6148f0c4d5244";
959 972 };
960 973 meta = {
961 974 license = [ pkgs.lib.licenses.bsdOriginal ];
962 975 };
963 976 };
964 977 psycopg2 = super.buildPythonPackage {
965 978 name = "psycopg2-2.6.1";
966 979 buildInputs = with self; [];
967 980 doCheck = false;
968 981 propagatedBuildInputs = with self; [];
969 982 src = fetchurl {
970 983 url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz";
971 984 md5 = "842b44f8c95517ed5b792081a2370da1";
972 985 };
973 986 meta = {
974 987 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
975 988 };
976 989 };
977 990 py = super.buildPythonPackage {
978 991 name = "py-1.4.29";
979 992 buildInputs = with self; [];
980 993 doCheck = false;
981 994 propagatedBuildInputs = with self; [];
982 995 src = fetchurl {
983 996 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
984 997 md5 = "c28e0accba523a29b35a48bb703fb96c";
985 998 };
986 999 meta = {
987 1000 license = [ pkgs.lib.licenses.mit ];
988 1001 };
989 1002 };
990 1003 py-bcrypt = super.buildPythonPackage {
991 1004 name = "py-bcrypt-0.4";
992 1005 buildInputs = with self; [];
993 1006 doCheck = false;
994 1007 propagatedBuildInputs = with self; [];
995 1008 src = fetchurl {
996 1009 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
997 1010 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
998 1011 };
999 1012 meta = {
1000 1013 license = [ pkgs.lib.licenses.bsdOriginal ];
1001 1014 };
1002 1015 };
1003 1016 py-gfm = super.buildPythonPackage {
1004 1017 name = "py-gfm-0.1.3";
1005 1018 buildInputs = with self; [];
1006 1019 doCheck = false;
1007 1020 propagatedBuildInputs = with self; [setuptools Markdown];
1008 1021 src = fetchurl {
1009 1022 url = "https://pypi.python.org/packages/12/e4/6b3d8678da04f97d7490d8264d8de51c2dc9fb91209ccee9c515c95e14c5/py-gfm-0.1.3.tar.gz";
1010 1023 md5 = "e588d9e69640a241b97e2c59c22527a6";
1011 1024 };
1012 1025 meta = {
1013 1026 license = [ pkgs.lib.licenses.bsdOriginal ];
1014 1027 };
1015 1028 };
1016 1029 pycrypto = super.buildPythonPackage {
1017 1030 name = "pycrypto-2.6.1";
1018 1031 buildInputs = with self; [];
1019 1032 doCheck = false;
1020 1033 propagatedBuildInputs = with self; [];
1021 1034 src = fetchurl {
1022 1035 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1023 1036 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1024 1037 };
1025 1038 meta = {
1026 1039 license = [ pkgs.lib.licenses.publicDomain ];
1027 1040 };
1028 1041 };
1029 1042 pycurl = super.buildPythonPackage {
1030 1043 name = "pycurl-7.19.5";
1031 1044 buildInputs = with self; [];
1032 1045 doCheck = false;
1033 1046 propagatedBuildInputs = with self; [];
1034 1047 src = fetchurl {
1035 1048 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1036 1049 md5 = "47b4eac84118e2606658122104e62072";
1037 1050 };
1038 1051 meta = {
1039 1052 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1040 1053 };
1041 1054 };
1042 1055 pyflakes = super.buildPythonPackage {
1043 1056 name = "pyflakes-0.8.1";
1044 1057 buildInputs = with self; [];
1045 1058 doCheck = false;
1046 1059 propagatedBuildInputs = with self; [];
1047 1060 src = fetchurl {
1048 1061 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1049 1062 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1050 1063 };
1051 1064 meta = {
1052 1065 license = [ pkgs.lib.licenses.mit ];
1053 1066 };
1054 1067 };
1055 1068 pyparsing = super.buildPythonPackage {
1056 1069 name = "pyparsing-1.5.7";
1057 1070 buildInputs = with self; [];
1058 1071 doCheck = false;
1059 1072 propagatedBuildInputs = with self; [];
1060 1073 src = fetchurl {
1061 1074 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1062 1075 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1063 1076 };
1064 1077 meta = {
1065 1078 license = [ pkgs.lib.licenses.mit ];
1066 1079 };
1067 1080 };
1068 1081 pyramid = super.buildPythonPackage {
1069 1082 name = "pyramid-1.6.1";
1070 1083 buildInputs = with self; [];
1071 1084 doCheck = false;
1072 1085 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
1073 1086 src = fetchurl {
1074 1087 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
1075 1088 md5 = "b18688ff3cc33efdbb098a35b45dd122";
1076 1089 };
1077 1090 meta = {
1078 1091 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1079 1092 };
1080 1093 };
1081 1094 pyramid-beaker = super.buildPythonPackage {
1082 1095 name = "pyramid-beaker-0.8";
1083 1096 buildInputs = with self; [];
1084 1097 doCheck = false;
1085 1098 propagatedBuildInputs = with self; [pyramid Beaker];
1086 1099 src = fetchurl {
1087 1100 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1088 1101 md5 = "22f14be31b06549f80890e2c63a93834";
1089 1102 };
1090 1103 meta = {
1091 1104 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1092 1105 };
1093 1106 };
1094 1107 pyramid-debugtoolbar = super.buildPythonPackage {
1095 1108 name = "pyramid-debugtoolbar-2.4.2";
1096 1109 buildInputs = with self; [];
1097 1110 doCheck = false;
1098 1111 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1099 1112 src = fetchurl {
1100 1113 url = "https://pypi.python.org/packages/89/00/ed5426ee41ed747ba3ffd30e8230841a6878286ea67d480b1444d24f06a2/pyramid_debugtoolbar-2.4.2.tar.gz";
1101 1114 md5 = "073ea67086cc4bd5decc3a000853642d";
1102 1115 };
1103 1116 meta = {
1104 1117 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1105 1118 };
1106 1119 };
1107 1120 pyramid-jinja2 = super.buildPythonPackage {
1108 1121 name = "pyramid-jinja2-2.5";
1109 1122 buildInputs = with self; [];
1110 1123 doCheck = false;
1111 1124 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1112 1125 src = fetchurl {
1113 1126 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1114 1127 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1115 1128 };
1116 1129 meta = {
1117 1130 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1118 1131 };
1119 1132 };
1120 1133 pyramid-mako = super.buildPythonPackage {
1121 1134 name = "pyramid-mako-1.0.2";
1122 1135 buildInputs = with self; [];
1123 1136 doCheck = false;
1124 1137 propagatedBuildInputs = with self; [pyramid Mako];
1125 1138 src = fetchurl {
1126 1139 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1127 1140 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1128 1141 };
1129 1142 meta = {
1130 1143 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1131 1144 };
1132 1145 };
1133 1146 pysqlite = super.buildPythonPackage {
1134 1147 name = "pysqlite-2.6.3";
1135 1148 buildInputs = with self; [];
1136 1149 doCheck = false;
1137 1150 propagatedBuildInputs = with self; [];
1138 1151 src = fetchurl {
1139 1152 url = "https://pypi.python.org/packages/5c/a6/1c429cd4c8069cf4bfbd0eb4d592b3f4042155a8202df83d7e9b93aa3dc2/pysqlite-2.6.3.tar.gz";
1140 1153 md5 = "7ff1cedee74646b50117acff87aa1cfa";
1141 1154 };
1142 1155 meta = {
1143 1156 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1144 1157 };
1145 1158 };
1146 1159 pytest = super.buildPythonPackage {
1147 1160 name = "pytest-2.8.5";
1148 1161 buildInputs = with self; [];
1149 1162 doCheck = false;
1150 1163 propagatedBuildInputs = with self; [py];
1151 1164 src = fetchurl {
1152 1165 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
1153 1166 md5 = "8493b06f700862f1294298d6c1b715a9";
1154 1167 };
1155 1168 meta = {
1156 1169 license = [ pkgs.lib.licenses.mit ];
1157 1170 };
1158 1171 };
1159 1172 pytest-catchlog = super.buildPythonPackage {
1160 1173 name = "pytest-catchlog-1.2.2";
1161 1174 buildInputs = with self; [];
1162 1175 doCheck = false;
1163 1176 propagatedBuildInputs = with self; [py pytest];
1164 1177 src = fetchurl {
1165 1178 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1166 1179 md5 = "09d890c54c7456c818102b7ff8c182c8";
1167 1180 };
1168 1181 meta = {
1169 1182 license = [ pkgs.lib.licenses.mit ];
1170 1183 };
1171 1184 };
1172 1185 pytest-cov = super.buildPythonPackage {
1173 1186 name = "pytest-cov-1.8.1";
1174 1187 buildInputs = with self; [];
1175 1188 doCheck = false;
1176 1189 propagatedBuildInputs = with self; [py pytest coverage cov-core];
1177 1190 src = fetchurl {
1178 1191 url = "https://pypi.python.org/packages/11/4b/b04646e97f1721878eb21e9f779102d84dd044d324382263b1770a3e4838/pytest-cov-1.8.1.tar.gz";
1179 1192 md5 = "76c778afa2494088270348be42d759fc";
1180 1193 };
1181 1194 meta = {
1182 1195 license = [ pkgs.lib.licenses.mit ];
1183 1196 };
1184 1197 };
1185 1198 pytest-profiling = super.buildPythonPackage {
1186 1199 name = "pytest-profiling-1.0.1";
1187 1200 buildInputs = with self; [];
1188 1201 doCheck = false;
1189 1202 propagatedBuildInputs = with self; [six pytest gprof2dot];
1190 1203 src = fetchurl {
1191 1204 url = "https://pypi.python.org/packages/d8/67/8ffab73406e22870e07fa4dc8dce1d7689b26dba8efd00161c9b6fc01ec0/pytest-profiling-1.0.1.tar.gz";
1192 1205 md5 = "354404eb5b3fd4dc5eb7fffbb3d9b68b";
1193 1206 };
1194 1207 meta = {
1195 1208 license = [ pkgs.lib.licenses.mit ];
1196 1209 };
1197 1210 };
1198 1211 pytest-runner = super.buildPythonPackage {
1199 1212 name = "pytest-runner-2.7.1";
1200 1213 buildInputs = with self; [];
1201 1214 doCheck = false;
1202 1215 propagatedBuildInputs = with self; [];
1203 1216 src = fetchurl {
1204 1217 url = "https://pypi.python.org/packages/99/6b/c4ff4418d3424d4475b7af60724fd4a5cdd91ed8e489dc9443281f0052bc/pytest-runner-2.7.1.tar.gz";
1205 1218 md5 = "e56f0bc8d79a6bd91772b44ef4215c7e";
1206 1219 };
1207 1220 meta = {
1208 1221 license = [ pkgs.lib.licenses.mit ];
1209 1222 };
1210 1223 };
1211 1224 pytest-timeout = super.buildPythonPackage {
1212 1225 name = "pytest-timeout-0.4";
1213 1226 buildInputs = with self; [];
1214 1227 doCheck = false;
1215 1228 propagatedBuildInputs = with self; [pytest];
1216 1229 src = fetchurl {
1217 1230 url = "https://pypi.python.org/packages/24/48/5f6bd4b8026a26e1dd427243d560a29a0f1b24a5c7cffca4bf049a7bb65b/pytest-timeout-0.4.tar.gz";
1218 1231 md5 = "03b28aff69cbbfb959ed35ade5fde262";
1219 1232 };
1220 1233 meta = {
1221 1234 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1222 1235 };
1223 1236 };
1224 1237 python-dateutil = super.buildPythonPackage {
1225 1238 name = "python-dateutil-1.5";
1226 1239 buildInputs = with self; [];
1227 1240 doCheck = false;
1228 1241 propagatedBuildInputs = with self; [];
1229 1242 src = fetchurl {
1230 1243 url = "https://pypi.python.org/packages/b4/7c/df59c89a753eb33c7c44e1dd42de0e9bc2ccdd5a4d576e0bfad97cc280cb/python-dateutil-1.5.tar.gz";
1231 1244 md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5";
1232 1245 };
1233 1246 meta = {
1234 1247 license = [ pkgs.lib.licenses.psfl ];
1235 1248 };
1236 1249 };
1237 1250 python-editor = super.buildPythonPackage {
1238 1251 name = "python-editor-1.0.1";
1239 1252 buildInputs = with self; [];
1240 1253 doCheck = false;
1241 1254 propagatedBuildInputs = with self; [];
1242 1255 src = fetchurl {
1243 1256 url = "https://pypi.python.org/packages/2b/c0/df7b87d5cf016f82eab3b05cd35f53287c1178ad8c42bfb6fa61b89b22f6/python-editor-1.0.1.tar.gz";
1244 1257 md5 = "e1fa63535b40e022fa4fd646fd8b511a";
1245 1258 };
1246 1259 meta = {
1247 1260 license = [ pkgs.lib.licenses.asl20 ];
1248 1261 };
1249 1262 };
1250 1263 python-ldap = super.buildPythonPackage {
1251 1264 name = "python-ldap-2.4.19";
1252 1265 buildInputs = with self; [];
1253 1266 doCheck = false;
1254 1267 propagatedBuildInputs = with self; [setuptools];
1255 1268 src = fetchurl {
1256 1269 url = "https://pypi.python.org/packages/42/81/1b64838c82e64f14d4e246ff00b52e650a35c012551b891ada2b85d40737/python-ldap-2.4.19.tar.gz";
1257 1270 md5 = "b941bf31d09739492aa19ef679e94ae3";
1258 1271 };
1259 1272 meta = {
1260 1273 license = [ pkgs.lib.licenses.psfl ];
1261 1274 };
1262 1275 };
1263 1276 python-memcached = super.buildPythonPackage {
1264 1277 name = "python-memcached-1.57";
1265 1278 buildInputs = with self; [];
1266 1279 doCheck = false;
1267 1280 propagatedBuildInputs = with self; [six];
1268 1281 src = fetchurl {
1269 1282 url = "https://pypi.python.org/packages/52/9d/eebc0dcbc5c7c66840ad207dfc1baa376dadb74912484bff73819cce01e6/python-memcached-1.57.tar.gz";
1270 1283 md5 = "de21f64b42b2d961f3d4ad7beb5468a1";
1271 1284 };
1272 1285 meta = {
1273 1286 license = [ pkgs.lib.licenses.psfl ];
1274 1287 };
1275 1288 };
1276 1289 python-pam = super.buildPythonPackage {
1277 1290 name = "python-pam-1.8.2";
1278 1291 buildInputs = with self; [];
1279 1292 doCheck = false;
1280 1293 propagatedBuildInputs = with self; [];
1281 1294 src = fetchurl {
1282 1295 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1283 1296 md5 = "db71b6b999246fb05d78ecfbe166629d";
1284 1297 };
1285 1298 meta = {
1286 1299 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1287 1300 };
1288 1301 };
1289 1302 pytz = super.buildPythonPackage {
1290 1303 name = "pytz-2015.4";
1291 1304 buildInputs = with self; [];
1292 1305 doCheck = false;
1293 1306 propagatedBuildInputs = with self; [];
1294 1307 src = fetchurl {
1295 1308 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1296 1309 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1297 1310 };
1298 1311 meta = {
1299 1312 license = [ pkgs.lib.licenses.mit ];
1300 1313 };
1301 1314 };
1302 1315 pyzmq = super.buildPythonPackage {
1303 1316 name = "pyzmq-14.6.0";
1304 1317 buildInputs = with self; [];
1305 1318 doCheck = false;
1306 1319 propagatedBuildInputs = with self; [];
1307 1320 src = fetchurl {
1308 1321 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1309 1322 md5 = "395b5de95a931afa5b14c9349a5b8024";
1310 1323 };
1311 1324 meta = {
1312 1325 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1313 1326 };
1314 1327 };
1315 1328 recaptcha-client = super.buildPythonPackage {
1316 1329 name = "recaptcha-client-1.0.6";
1317 1330 buildInputs = with self; [];
1318 1331 doCheck = false;
1319 1332 propagatedBuildInputs = with self; [];
1320 1333 src = fetchurl {
1321 1334 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1322 1335 md5 = "74228180f7e1fb76c4d7089160b0d919";
1323 1336 };
1324 1337 meta = {
1325 1338 license = [ { fullName = "MIT/X11"; } ];
1326 1339 };
1327 1340 };
1328 1341 repoze.lru = super.buildPythonPackage {
1329 1342 name = "repoze.lru-0.6";
1330 1343 buildInputs = with self; [];
1331 1344 doCheck = false;
1332 1345 propagatedBuildInputs = with self; [];
1333 1346 src = fetchurl {
1334 1347 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1335 1348 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1336 1349 };
1337 1350 meta = {
1338 1351 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1339 1352 };
1340 1353 };
1341 1354 requests = super.buildPythonPackage {
1342 1355 name = "requests-2.9.1";
1343 1356 buildInputs = with self; [];
1344 1357 doCheck = false;
1345 1358 propagatedBuildInputs = with self; [];
1346 1359 src = fetchurl {
1347 1360 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1348 1361 md5 = "0b7f480d19012ec52bab78292efd976d";
1349 1362 };
1350 1363 meta = {
1351 1364 license = [ pkgs.lib.licenses.asl20 ];
1352 1365 };
1353 1366 };
1354 1367 rhodecode-enterprise-ce = super.buildPythonPackage {
1355 1368 name = "rhodecode-enterprise-ce-4.3.0";
1356 1369 buildInputs = with self; [WebTest configobj cssselect flake8 lxml mock pytest pytest-cov pytest-runner];
1357 1370 doCheck = true;
1358 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1371 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu marshmallow msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1359 1372 src = ./.;
1360 1373 meta = {
1361 1374 license = [ { fullName = "AGPLv3, and Commercial License"; } ];
1362 1375 };
1363 1376 };
1364 1377 rhodecode-tools = super.buildPythonPackage {
1365 1378 name = "rhodecode-tools-0.8.3";
1366 1379 buildInputs = with self; [];
1367 1380 doCheck = false;
1368 1381 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests Whoosh elasticsearch elasticsearch-dsl];
1369 1382 src = fetchurl {
1370 1383 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip";
1371 1384 md5 = "9acdfd71b8ddf4056057065f37ab9ccb";
1372 1385 };
1373 1386 meta = {
1374 1387 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1375 1388 };
1376 1389 };
1377 1390 serpent = super.buildPythonPackage {
1378 1391 name = "serpent-1.12";
1379 1392 buildInputs = with self; [];
1380 1393 doCheck = false;
1381 1394 propagatedBuildInputs = with self; [];
1382 1395 src = fetchurl {
1383 1396 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
1384 1397 md5 = "05869ac7b062828b34f8f927f0457b65";
1385 1398 };
1386 1399 meta = {
1387 1400 license = [ pkgs.lib.licenses.mit ];
1388 1401 };
1389 1402 };
1390 1403 setproctitle = super.buildPythonPackage {
1391 1404 name = "setproctitle-1.1.8";
1392 1405 buildInputs = with self; [];
1393 1406 doCheck = false;
1394 1407 propagatedBuildInputs = with self; [];
1395 1408 src = fetchurl {
1396 1409 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1397 1410 md5 = "728f4c8c6031bbe56083a48594027edd";
1398 1411 };
1399 1412 meta = {
1400 1413 license = [ pkgs.lib.licenses.bsdOriginal ];
1401 1414 };
1402 1415 };
1403 1416 setuptools = super.buildPythonPackage {
1404 1417 name = "setuptools-20.8.1";
1405 1418 buildInputs = with self; [];
1406 1419 doCheck = false;
1407 1420 propagatedBuildInputs = with self; [];
1408 1421 src = fetchurl {
1409 1422 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
1410 1423 md5 = "fe58a5cac0df20bb83942b252a4b0543";
1411 1424 };
1412 1425 meta = {
1413 1426 license = [ pkgs.lib.licenses.mit ];
1414 1427 };
1415 1428 };
1416 1429 setuptools-scm = super.buildPythonPackage {
1417 1430 name = "setuptools-scm-1.11.0";
1418 1431 buildInputs = with self; [];
1419 1432 doCheck = false;
1420 1433 propagatedBuildInputs = with self; [];
1421 1434 src = fetchurl {
1422 1435 url = "https://pypi.python.org/packages/cd/5f/e3a038292358058d83d764a47d09114aa5a8003ed4529518f9e580f1a94f/setuptools_scm-1.11.0.tar.gz";
1423 1436 md5 = "4c5c896ba52e134bbc3507bac6400087";
1424 1437 };
1425 1438 meta = {
1426 1439 license = [ pkgs.lib.licenses.mit ];
1427 1440 };
1428 1441 };
1429 1442 simplejson = super.buildPythonPackage {
1430 1443 name = "simplejson-3.7.2";
1431 1444 buildInputs = with self; [];
1432 1445 doCheck = false;
1433 1446 propagatedBuildInputs = with self; [];
1434 1447 src = fetchurl {
1435 1448 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
1436 1449 md5 = "a5fc7d05d4cb38492285553def5d4b46";
1437 1450 };
1438 1451 meta = {
1439 1452 license = [ pkgs.lib.licenses.mit pkgs.lib.licenses.afl21 ];
1440 1453 };
1441 1454 };
1442 1455 six = super.buildPythonPackage {
1443 1456 name = "six-1.9.0";
1444 1457 buildInputs = with self; [];
1445 1458 doCheck = false;
1446 1459 propagatedBuildInputs = with self; [];
1447 1460 src = fetchurl {
1448 1461 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1449 1462 md5 = "476881ef4012262dfc8adc645ee786c4";
1450 1463 };
1451 1464 meta = {
1452 1465 license = [ pkgs.lib.licenses.mit ];
1453 1466 };
1454 1467 };
1455 1468 subprocess32 = super.buildPythonPackage {
1456 1469 name = "subprocess32-3.2.6";
1457 1470 buildInputs = with self; [];
1458 1471 doCheck = false;
1459 1472 propagatedBuildInputs = with self; [];
1460 1473 src = fetchurl {
1461 1474 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
1462 1475 md5 = "754c5ab9f533e764f931136974b618f1";
1463 1476 };
1464 1477 meta = {
1465 1478 license = [ pkgs.lib.licenses.psfl ];
1466 1479 };
1467 1480 };
1468 1481 supervisor = super.buildPythonPackage {
1469 1482 name = "supervisor-3.3.0";
1470 1483 buildInputs = with self; [];
1471 1484 doCheck = false;
1472 1485 propagatedBuildInputs = with self; [meld3];
1473 1486 src = fetchurl {
1474 1487 url = "https://pypi.python.org/packages/44/80/d28047d120bfcc8158b4e41127706731ee6a3419c661e0a858fb0e7c4b2d/supervisor-3.3.0.tar.gz";
1475 1488 md5 = "46bac00378d1eddb616752b990c67416";
1476 1489 };
1477 1490 meta = {
1478 1491 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1479 1492 };
1480 1493 };
1481 1494 transifex-client = super.buildPythonPackage {
1482 1495 name = "transifex-client-0.10";
1483 1496 buildInputs = with self; [];
1484 1497 doCheck = false;
1485 1498 propagatedBuildInputs = with self; [];
1486 1499 src = fetchurl {
1487 1500 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1488 1501 md5 = "5549538d84b8eede6b254cd81ae024fa";
1489 1502 };
1490 1503 meta = {
1491 1504 license = [ pkgs.lib.licenses.gpl2 ];
1492 1505 };
1493 1506 };
1494 1507 translationstring = super.buildPythonPackage {
1495 1508 name = "translationstring-1.3";
1496 1509 buildInputs = with self; [];
1497 1510 doCheck = false;
1498 1511 propagatedBuildInputs = with self; [];
1499 1512 src = fetchurl {
1500 1513 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1501 1514 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1502 1515 };
1503 1516 meta = {
1504 1517 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1505 1518 };
1506 1519 };
1507 1520 trollius = super.buildPythonPackage {
1508 1521 name = "trollius-1.0.4";
1509 1522 buildInputs = with self; [];
1510 1523 doCheck = false;
1511 1524 propagatedBuildInputs = with self; [futures];
1512 1525 src = fetchurl {
1513 1526 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1514 1527 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1515 1528 };
1516 1529 meta = {
1517 1530 license = [ pkgs.lib.licenses.asl20 ];
1518 1531 };
1519 1532 };
1520 1533 uWSGI = super.buildPythonPackage {
1521 1534 name = "uWSGI-2.0.11.2";
1522 1535 buildInputs = with self; [];
1523 1536 doCheck = false;
1524 1537 propagatedBuildInputs = with self; [];
1525 1538 src = fetchurl {
1526 1539 url = "https://pypi.python.org/packages/9b/78/918db0cfab0546afa580c1e565209c49aaf1476bbfe491314eadbe47c556/uwsgi-2.0.11.2.tar.gz";
1527 1540 md5 = "1f02dcbee7f6f61de4b1fd68350cf16f";
1528 1541 };
1529 1542 meta = {
1530 1543 license = [ pkgs.lib.licenses.gpl2 ];
1531 1544 };
1532 1545 };
1533 1546 urllib3 = super.buildPythonPackage {
1534 1547 name = "urllib3-1.16";
1535 1548 buildInputs = with self; [];
1536 1549 doCheck = false;
1537 1550 propagatedBuildInputs = with self; [];
1538 1551 src = fetchurl {
1539 1552 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1540 1553 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1541 1554 };
1542 1555 meta = {
1543 1556 license = [ pkgs.lib.licenses.mit ];
1544 1557 };
1545 1558 };
1546 1559 venusian = super.buildPythonPackage {
1547 1560 name = "venusian-1.0";
1548 1561 buildInputs = with self; [];
1549 1562 doCheck = false;
1550 1563 propagatedBuildInputs = with self; [];
1551 1564 src = fetchurl {
1552 1565 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
1553 1566 md5 = "dccf2eafb7113759d60c86faf5538756";
1554 1567 };
1555 1568 meta = {
1556 1569 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1557 1570 };
1558 1571 };
1559 1572 waitress = super.buildPythonPackage {
1560 1573 name = "waitress-0.8.9";
1561 1574 buildInputs = with self; [];
1562 1575 doCheck = false;
1563 1576 propagatedBuildInputs = with self; [setuptools];
1564 1577 src = fetchurl {
1565 1578 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
1566 1579 md5 = "da3f2e62b3676be5dd630703a68e2a04";
1567 1580 };
1568 1581 meta = {
1569 1582 license = [ pkgs.lib.licenses.zpt21 ];
1570 1583 };
1571 1584 };
1572 1585 wsgiref = super.buildPythonPackage {
1573 1586 name = "wsgiref-0.1.2";
1574 1587 buildInputs = with self; [];
1575 1588 doCheck = false;
1576 1589 propagatedBuildInputs = with self; [];
1577 1590 src = fetchurl {
1578 1591 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1579 1592 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1580 1593 };
1581 1594 meta = {
1582 1595 license = [ { fullName = "PSF or ZPL"; } ];
1583 1596 };
1584 1597 };
1585 1598 zope.cachedescriptors = super.buildPythonPackage {
1586 1599 name = "zope.cachedescriptors-4.0.0";
1587 1600 buildInputs = with self; [];
1588 1601 doCheck = false;
1589 1602 propagatedBuildInputs = with self; [setuptools];
1590 1603 src = fetchurl {
1591 1604 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1592 1605 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1593 1606 };
1594 1607 meta = {
1595 1608 license = [ pkgs.lib.licenses.zpt21 ];
1596 1609 };
1597 1610 };
1598 1611 zope.deprecation = super.buildPythonPackage {
1599 1612 name = "zope.deprecation-4.1.2";
1600 1613 buildInputs = with self; [];
1601 1614 doCheck = false;
1602 1615 propagatedBuildInputs = with self; [setuptools];
1603 1616 src = fetchurl {
1604 1617 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1605 1618 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1606 1619 };
1607 1620 meta = {
1608 1621 license = [ pkgs.lib.licenses.zpt21 ];
1609 1622 };
1610 1623 };
1611 1624 zope.event = super.buildPythonPackage {
1612 1625 name = "zope.event-4.0.3";
1613 1626 buildInputs = with self; [];
1614 1627 doCheck = false;
1615 1628 propagatedBuildInputs = with self; [setuptools];
1616 1629 src = fetchurl {
1617 1630 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
1618 1631 md5 = "9a3780916332b18b8b85f522bcc3e249";
1619 1632 };
1620 1633 meta = {
1621 1634 license = [ pkgs.lib.licenses.zpt21 ];
1622 1635 };
1623 1636 };
1624 1637 zope.interface = super.buildPythonPackage {
1625 1638 name = "zope.interface-4.1.3";
1626 1639 buildInputs = with self; [];
1627 1640 doCheck = false;
1628 1641 propagatedBuildInputs = with self; [setuptools];
1629 1642 src = fetchurl {
1630 1643 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
1631 1644 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
1632 1645 };
1633 1646 meta = {
1634 1647 license = [ pkgs.lib.licenses.zpt21 ];
1635 1648 };
1636 1649 };
1637 1650
1638 1651 ### Test requirements
1639 1652
1640 1653
1641 1654 }
@@ -1,151 +1,152 b''
1 1 Babel==1.3
2 2 Beaker==1.7.0
3 3 CProfileV==1.0.6
4 4 Fabric==1.10.0
5 5 FormEncode==1.2.4
6 6 Jinja2==2.7.3
7 7 Mako==1.0.1
8 8 Markdown==2.6.2
9 9 MarkupSafe==0.23
10 10 MySQL-python==1.2.5
11 11 Paste==2.0.2
12 12 PasteDeploy==1.5.2
13 13 PasteScript==1.7.5
14 14 Pygments==2.1.3
15 15
16 16 # TODO: This version is not available on PyPI
17 17 # Pylons==1.0.2.dev20160108
18 18 Pylons==1.0.1
19 19
20 20 # TODO: This version is not available, but newer ones are
21 21 # Pyro4==4.35
22 22 Pyro4==4.41
23 23
24 24 # TODO: This should probably not be in here
25 25 # -e hg+https://johbo@code.rhodecode.com/johbo/rhodecode-fork@3a454bd1f17c0b2b2a951cf2b111e0320d7942a9#egg=RhodeCodeEnterprise-dev
26 26
27 27 # TODO: This is not really a dependency, we should add it only
28 28 # into the development environment, since there it is useful.
29 29 # RhodeCodeVCSServer==3.9.0
30 30
31 31 Routes==1.13
32 32 SQLAlchemy==0.9.9
33 33 Sphinx==1.2.2
34 34 Tempita==0.5.2
35 35 URLObject==2.4.0
36 36 WebError==0.10.3
37 37
38 38 # TODO: This is modified by us, needs a better integration. For now
39 39 # using the latest version before.
40 40 # WebHelpers==1.3.dev20150807
41 41 WebHelpers==1.3
42 42
43 43 WebHelpers2==2.0
44 44 WebOb==1.3.1
45 45 WebTest==1.4.3
46 46 Whoosh==2.7.0
47 47 alembic==0.8.4
48 48 amqplib==1.0.2
49 49 anyjson==0.3.3
50 50 appenlight-client==0.6.14
51 51 authomatic==0.1.0.post1;
52 52 backport-ipaddress==0.1
53 53 bottle==0.12.8
54 54 bumpversion==0.5.3
55 55 celery==2.2.10
56 56 click==5.1
57 57 colander==1.2
58 58 configobj==5.0.6
59 59 cov-core==1.15.0
60 60 coverage==3.7.1
61 61 cssselect==0.9.1
62 62 decorator==3.4.2
63 63 docutils==0.12
64 64 dogpile.cache==0.6.1
65 65 dogpile.core==0.4.1
66 66 dulwich==0.12.0
67 67 ecdsa==0.11
68 68 flake8==2.4.1
69 69 future==0.14.3
70 70 futures==3.0.2
71 71 gprof2dot==2015.12.1
72 72 gunicorn==19.6.0
73 73
74 74 # TODO: Needs subvertpy and blows up without Subversion headers,
75 75 # actually we should not need this for Enterprise at all.
76 76 # hgsubversion==1.8.2
77 77
78 78 gnureadline==6.3.3
79 79 infrae.cache==1.0.1
80 80 invoke==0.13.0
81 81 ipdb==0.8
82 82 ipython==3.1.0
83 83 iso8601==0.1.11
84 84 itsdangerous==0.24
85 85 kombu==1.5.1
86 86 lxml==3.4.4
87 marshmallow==2.8.0
87 88 mccabe==0.3
88 89 meld3==1.0.2
89 90 mock==1.0.1
90 91 msgpack-python==0.4.6
91 92 nose==1.3.6
92 93 objgraph==2.0.0
93 94 packaging==15.2
94 95 paramiko==1.15.1
95 96 pep8==1.5.7
96 97 psutil==2.2.1
97 98 psycopg2==2.6.1
98 99 py==1.4.29
99 100 py-bcrypt==0.4
100 101 py-gfm==0.1.3
101 102 pycrypto==2.6.1
102 103 pycurl==7.19.5
103 104 pyflakes==0.8.1
104 105 pyparsing==1.5.7
105 106 pyramid==1.6.1
106 107 pyramid-beaker==0.8
107 108 pyramid-debugtoolbar==2.4.2
108 109 pyramid-jinja2==2.5
109 110 pyramid-mako==1.0.2
110 111 pysqlite==2.6.3
111 112 pytest==2.8.5
112 113 pytest-runner==2.7.1
113 114 pytest-catchlog==1.2.2
114 115 pytest-cov==1.8.1
115 116 pytest-profiling==1.0.1
116 117 pytest-timeout==0.4
117 118 python-dateutil==1.5
118 119 python-ldap==2.4.19
119 120 python-memcached==1.57
120 121 python-pam==1.8.2
121 122 pytz==2015.4
122 123 pyzmq==14.6.0
123 124
124 125 # TODO: This is not available in public
125 126 # rc-testdata==0.2.0
126 127
127 128 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip#md5=9acdfd71b8ddf4056057065f37ab9ccb
128 129
129 130
130 131 recaptcha-client==1.0.6
131 132 repoze.lru==0.6
132 133 requests==2.9.1
133 134 serpent==1.12
134 135 setproctitle==1.1.8
135 136 setuptools==20.8.1
136 137 setuptools-scm==1.11.0
137 138 simplejson==3.7.2
138 139 six==1.9.0
139 140 subprocess32==3.2.6
140 141 supervisor==3.3.0
141 142 transifex-client==0.10
142 143 translationstring==1.3
143 144 trollius==1.0.4
144 145 uWSGI==2.0.11.2
145 146 venusian==1.0
146 147 waitress==0.8.9
147 148 wsgiref==0.1.2
148 149 zope.cachedescriptors==4.0.0
149 150 zope.deprecation==4.1.2
150 151 zope.event==4.0.3
151 152 zope.interface==4.1.3
@@ -1,158 +1,163 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23
24 24 from rhodecode.model.repo import RepoModel
25 25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
26 26 from rhodecode.api.tests.utils import (
27 27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
28 28 from rhodecode.tests.fixture import Fixture
29 29
30 30
31 31 fixture = Fixture()
32 32
33 33 UPDATE_REPO_NAME = 'api_update_me'
34 34
35 35 class SAME_AS_UPDATES(object): """ Constant used for tests below """
36 36
37 37 @pytest.mark.usefixtures("testuser_api", "app")
38 38 class TestApiUpdateRepo(object):
39 39
40 40 @pytest.mark.parametrize("updates, expected", [
41 41 ({'owner': TEST_USER_REGULAR_LOGIN}, SAME_AS_UPDATES),
42 42 ({'description': 'new description'}, SAME_AS_UPDATES),
43 43 ({'clone_uri': 'http://foo.com/repo'}, SAME_AS_UPDATES),
44 44 ({'clone_uri': None}, {'clone_uri': ''}),
45 45 ({'clone_uri': ''}, {'clone_uri': ''}),
46 46 ({'landing_rev': 'branch:master'}, {'landing_rev': ['branch','master']}),
47 47 ({'enable_statistics': True}, SAME_AS_UPDATES),
48 48 ({'enable_locking': True}, SAME_AS_UPDATES),
49 49 ({'enable_downloads': True}, SAME_AS_UPDATES),
50 ({'name': 'new_repo_name'}, {'repo_name': 'new_repo_name'}),
51 ({'group': 'test_group_for_update'},
52 {'repo_name': 'test_group_for_update/%s' % UPDATE_REPO_NAME}),
50 ({'name': 'new_repo_name'}, {
51 'repo_name': 'new_repo_name',
52 'url': 'http://test.example.com:80/new_repo_name',
53 }),
54 ({'group': 'test_group_for_update'}, {
55 'repo_name': 'test_group_for_update/%s' % UPDATE_REPO_NAME,
56 'url': 'http://test.example.com:80/test_group_for_update/%s' % UPDATE_REPO_NAME
57 }),
53 58 ])
54 59 def test_api_update_repo(self, updates, expected, backend):
55 60 repo_name = UPDATE_REPO_NAME
56 61 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
57 62 if updates.get('group'):
58 63 fixture.create_repo_group(updates['group'])
59 64
60 65 expected_api_data = repo.get_api_data(include_secrets=True)
61 66 if expected is SAME_AS_UPDATES:
62 67 expected_api_data.update(updates)
63 68 else:
64 69 expected_api_data.update(expected)
65 70
66 71
67 72 id_, params = build_data(
68 73 self.apikey, 'update_repo', repoid=repo_name, **updates)
69 74 response = api_call(self.app, params)
70 75
71 76 if updates.get('name'):
72 77 repo_name = updates['name']
73 78 if updates.get('group'):
74 79 repo_name = '/'.join([updates['group'], repo_name])
75 80
76 81 try:
77 82 expected = {
78 83 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
79 84 'repository': jsonify(expected_api_data)
80 85 }
81 86 assert_ok(id_, expected, given=response.body)
82 87 finally:
83 88 fixture.destroy_repo(repo_name)
84 89 if updates.get('group'):
85 90 fixture.destroy_repo_group(updates['group'])
86 91
87 92 def test_api_update_repo_fork_of_field(self, backend):
88 93 master_repo = backend.create_repo()
89 94 repo = backend.create_repo()
90 95 updates = {
91 96 'fork_of': master_repo.repo_name
92 97 }
93 98 expected_api_data = repo.get_api_data(include_secrets=True)
94 99 expected_api_data.update(updates)
95 100
96 101 id_, params = build_data(
97 102 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
98 103 response = api_call(self.app, params)
99 104 expected = {
100 105 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
101 106 'repository': jsonify(expected_api_data)
102 107 }
103 108 assert_ok(id_, expected, given=response.body)
104 109 result = response.json['result']['repository']
105 110 assert result['fork_of'] == master_repo.repo_name
106 111
107 112 def test_api_update_repo_fork_of_not_found(self, backend):
108 113 master_repo_name = 'fake-parent-repo'
109 114 repo = backend.create_repo()
110 115 updates = {
111 116 'fork_of': master_repo_name
112 117 }
113 118 id_, params = build_data(
114 119 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
115 120 response = api_call(self.app, params)
116 121 expected = 'repository `{}` does not exist'.format(master_repo_name)
117 122 assert_error(id_, expected, given=response.body)
118 123
119 124 def test_api_update_repo_with_repo_group_not_existing(self):
120 125 repo_name = 'admin_owned'
121 126 fixture.create_repo(repo_name)
122 127 updates = {'group': 'test_group_for_update'}
123 128 id_, params = build_data(
124 129 self.apikey, 'update_repo', repoid=repo_name, **updates)
125 130 response = api_call(self.app, params)
126 131 try:
127 132 expected = 'repository group `%s` does not exist' % (
128 133 updates['group'],)
129 134 assert_error(id_, expected, given=response.body)
130 135 finally:
131 136 fixture.destroy_repo(repo_name)
132 137
133 138 def test_api_update_repo_regular_user_not_allowed(self):
134 139 repo_name = 'admin_owned'
135 140 fixture.create_repo(repo_name)
136 141 updates = {'active': False}
137 142 id_, params = build_data(
138 143 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
139 144 response = api_call(self.app, params)
140 145 try:
141 146 expected = 'repository `%s` does not exist' % (repo_name,)
142 147 assert_error(id_, expected, given=response.body)
143 148 finally:
144 149 fixture.destroy_repo(repo_name)
145 150
146 151 @mock.patch.object(RepoModel, 'update', crash)
147 152 def test_api_update_repo_exception_occurred(self, backend):
148 153 repo_name = UPDATE_REPO_NAME
149 154 fixture.create_repo(repo_name, repo_type=backend.alias)
150 155 id_, params = build_data(
151 156 self.apikey, 'update_repo', repoid=repo_name,
152 157 owner=TEST_USER_ADMIN_LOGIN,)
153 158 response = api_call(self.app, params)
154 159 try:
155 160 expected = 'failed to update repo `%s`' % (repo_name,)
156 161 assert_error(id_, expected, given=response.body)
157 162 finally:
158 163 fixture.destroy_repo(repo_name)
@@ -1,846 +1,847 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 pull requests controller for rhodecode for initializing pull requests
23 23 """
24 24
25 25 import formencode
26 26 import logging
27 27
28 28 from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest
29 29 from pylons import request, tmpl_context as c, url
30 30 from pylons.controllers.util import redirect
31 31 from pylons.i18n.translation import _
32 32 from sqlalchemy.sql import func
33 33 from sqlalchemy.sql.expression import or_
34 34
35 35 from rhodecode.lib import auth, diffs, helpers as h
36 36 from rhodecode.lib.ext_json import json
37 37 from rhodecode.lib.base import (
38 38 BaseRepoController, render, vcs_operation_context)
39 39 from rhodecode.lib.auth import (
40 40 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
41 41 HasAcceptedRepoType, XHRRequired)
42 42 from rhodecode.lib.utils import jsonify
43 43 from rhodecode.lib.utils2 import safe_int, safe_str, str2bool, safe_unicode
44 44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError)
47 47 from rhodecode.lib.diffs import LimitedDiffContainer
48 48 from rhodecode.model.changeset_status import ChangesetStatusModel
49 49 from rhodecode.model.comment import ChangesetCommentsModel
50 50 from rhodecode.model.db import PullRequest, ChangesetStatus, ChangesetComment, \
51 51 Repository
52 52 from rhodecode.model.forms import PullRequestForm
53 53 from rhodecode.model.meta import Session
54 54 from rhodecode.model.pull_request import PullRequestModel
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 class PullrequestsController(BaseRepoController):
60 60 def __before__(self):
61 61 super(PullrequestsController, self).__before__()
62 62
63 63 def _load_compare_data(self, pull_request, enable_comments=True):
64 64 """
65 65 Load context data needed for generating compare diff
66 66
67 67 :param pull_request: object related to the request
68 68 :param enable_comments: flag to determine if comments are included
69 69 """
70 70 source_repo = pull_request.source_repo
71 71 source_ref_id = pull_request.source_ref_parts.commit_id
72 72
73 73 target_repo = pull_request.target_repo
74 74 target_ref_id = pull_request.target_ref_parts.commit_id
75 75
76 76 # despite opening commits for bookmarks/branches/tags, we always
77 77 # convert this to rev to prevent changes after bookmark or branch change
78 78 c.source_ref_type = 'rev'
79 79 c.source_ref = source_ref_id
80 80
81 81 c.target_ref_type = 'rev'
82 82 c.target_ref = target_ref_id
83 83
84 84 c.source_repo = source_repo
85 85 c.target_repo = target_repo
86 86
87 87 c.fulldiff = bool(request.GET.get('fulldiff'))
88 88
89 89 # diff_limit is the old behavior, will cut off the whole diff
90 90 # if the limit is applied otherwise will just hide the
91 91 # big files from the front-end
92 92 diff_limit = self.cut_off_limit_diff
93 93 file_limit = self.cut_off_limit_file
94 94
95 95 pre_load = ["author", "branch", "date", "message"]
96 96
97 97 c.commit_ranges = []
98 98 source_commit = EmptyCommit()
99 99 target_commit = EmptyCommit()
100 100 c.missing_requirements = False
101 101 try:
102 102 c.commit_ranges = [
103 103 source_repo.get_commit(commit_id=rev, pre_load=pre_load)
104 104 for rev in pull_request.revisions]
105 105
106 106 c.statuses = source_repo.statuses(
107 107 [x.raw_id for x in c.commit_ranges])
108 108
109 109 target_commit = source_repo.get_commit(
110 110 commit_id=safe_str(target_ref_id))
111 111 source_commit = source_repo.get_commit(
112 112 commit_id=safe_str(source_ref_id))
113 113 except RepositoryRequirementError:
114 114 c.missing_requirements = True
115 115
116 116 c.missing_commits = False
117 117 if (c.missing_requirements or
118 118 isinstance(source_commit, EmptyCommit) or
119 119 source_commit == target_commit):
120 120 _parsed = []
121 121 c.missing_commits = True
122 122 else:
123 123 vcs_diff = PullRequestModel().get_diff(pull_request)
124 124 diff_processor = diffs.DiffProcessor(
125 125 vcs_diff, format='gitdiff', diff_limit=diff_limit,
126 126 file_limit=file_limit, show_full_diff=c.fulldiff)
127 127 _parsed = diff_processor.prepare()
128 128
129 129 c.limited_diff = isinstance(_parsed, LimitedDiffContainer)
130 130
131 131 c.files = []
132 132 c.changes = {}
133 133 c.lines_added = 0
134 134 c.lines_deleted = 0
135 135 c.included_files = []
136 136 c.deleted_files = []
137 137
138 138 for f in _parsed:
139 139 st = f['stats']
140 140 c.lines_added += st['added']
141 141 c.lines_deleted += st['deleted']
142 142
143 143 fid = h.FID('', f['filename'])
144 144 c.files.append([fid, f['operation'], f['filename'], f['stats']])
145 145 c.included_files.append(f['filename'])
146 146 html_diff = diff_processor.as_html(enable_comments=enable_comments,
147 147 parsed_lines=[f])
148 148 c.changes[fid] = [f['operation'], f['filename'], html_diff, f]
149 149
150 150 def _extract_ordering(self, request):
151 151 column_index = safe_int(request.GET.get('order[0][column]'))
152 152 order_dir = request.GET.get('order[0][dir]', 'desc')
153 153 order_by = request.GET.get(
154 154 'columns[%s][data][sort]' % column_index, 'name_raw')
155 155 return order_by, order_dir
156 156
157 157 @LoginRequired()
158 158 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
159 159 'repository.admin')
160 160 @HasAcceptedRepoType('git', 'hg')
161 161 def show_all(self, repo_name):
162 162 # filter types
163 163 c.active = 'open'
164 164 c.source = str2bool(request.GET.get('source'))
165 165 c.closed = str2bool(request.GET.get('closed'))
166 166 c.my = str2bool(request.GET.get('my'))
167 167 c.awaiting_review = str2bool(request.GET.get('awaiting_review'))
168 168 c.awaiting_my_review = str2bool(request.GET.get('awaiting_my_review'))
169 169 c.repo_name = repo_name
170 170
171 171 opened_by = None
172 172 if c.my:
173 173 c.active = 'my'
174 174 opened_by = [c.rhodecode_user.user_id]
175 175
176 176 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
177 177 if c.closed:
178 178 c.active = 'closed'
179 179 statuses = [PullRequest.STATUS_CLOSED]
180 180
181 181 if c.awaiting_review and not c.source:
182 182 c.active = 'awaiting'
183 183 if c.source and not c.awaiting_review:
184 184 c.active = 'source'
185 185 if c.awaiting_my_review:
186 186 c.active = 'awaiting_my'
187 187
188 188 data = self._get_pull_requests_list(
189 189 repo_name=repo_name, opened_by=opened_by, statuses=statuses)
190 190 if not request.is_xhr:
191 191 c.data = json.dumps(data['data'])
192 192 c.records_total = data['recordsTotal']
193 193 return render('/pullrequests/pullrequests.html')
194 194 else:
195 195 return json.dumps(data)
196 196
197 197 def _get_pull_requests_list(self, repo_name, opened_by, statuses):
198 198 # pagination
199 199 start = safe_int(request.GET.get('start'), 0)
200 200 length = safe_int(request.GET.get('length'), c.visual.dashboard_items)
201 201 order_by, order_dir = self._extract_ordering(request)
202 202
203 203 if c.awaiting_review:
204 204 pull_requests = PullRequestModel().get_awaiting_review(
205 205 repo_name, source=c.source, opened_by=opened_by,
206 206 statuses=statuses, offset=start, length=length,
207 207 order_by=order_by, order_dir=order_dir)
208 208 pull_requests_total_count = PullRequestModel(
209 209 ).count_awaiting_review(
210 210 repo_name, source=c.source, statuses=statuses,
211 211 opened_by=opened_by)
212 212 elif c.awaiting_my_review:
213 213 pull_requests = PullRequestModel().get_awaiting_my_review(
214 214 repo_name, source=c.source, opened_by=opened_by,
215 215 user_id=c.rhodecode_user.user_id, statuses=statuses,
216 216 offset=start, length=length, order_by=order_by,
217 217 order_dir=order_dir)
218 218 pull_requests_total_count = PullRequestModel(
219 219 ).count_awaiting_my_review(
220 220 repo_name, source=c.source, user_id=c.rhodecode_user.user_id,
221 221 statuses=statuses, opened_by=opened_by)
222 222 else:
223 223 pull_requests = PullRequestModel().get_all(
224 224 repo_name, source=c.source, opened_by=opened_by,
225 225 statuses=statuses, offset=start, length=length,
226 226 order_by=order_by, order_dir=order_dir)
227 227 pull_requests_total_count = PullRequestModel().count_all(
228 228 repo_name, source=c.source, statuses=statuses,
229 229 opened_by=opened_by)
230 230
231 231 from rhodecode.lib.utils import PartialRenderer
232 232 _render = PartialRenderer('data_table/_dt_elements.html')
233 233 data = []
234 234 for pr in pull_requests:
235 235 comments = ChangesetCommentsModel().get_all_comments(
236 236 c.rhodecode_db_repo.repo_id, pull_request=pr)
237 237
238 238 data.append({
239 239 'name': _render('pullrequest_name',
240 240 pr.pull_request_id, pr.target_repo.repo_name),
241 241 'name_raw': pr.pull_request_id,
242 242 'status': _render('pullrequest_status',
243 243 pr.calculated_review_status()),
244 244 'title': _render(
245 245 'pullrequest_title', pr.title, pr.description),
246 246 'description': h.escape(pr.description),
247 247 'updated_on': _render('pullrequest_updated_on',
248 248 h.datetime_to_time(pr.updated_on)),
249 249 'updated_on_raw': h.datetime_to_time(pr.updated_on),
250 250 'created_on': _render('pullrequest_updated_on',
251 251 h.datetime_to_time(pr.created_on)),
252 252 'created_on_raw': h.datetime_to_time(pr.created_on),
253 253 'author': _render('pullrequest_author',
254 254 pr.author.full_contact, ),
255 255 'author_raw': pr.author.full_name,
256 256 'comments': _render('pullrequest_comments', len(comments)),
257 257 'comments_raw': len(comments),
258 258 'closed': pr.is_closed(),
259 259 })
260 260 # json used to render the grid
261 261 data = ({
262 262 'data': data,
263 263 'recordsTotal': pull_requests_total_count,
264 264 'recordsFiltered': pull_requests_total_count,
265 265 })
266 266 return data
267 267
268 268 @LoginRequired()
269 269 @NotAnonymous()
270 270 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
271 271 'repository.admin')
272 272 @HasAcceptedRepoType('git', 'hg')
273 273 def index(self):
274 274 source_repo = c.rhodecode_db_repo
275 275
276 276 try:
277 277 source_repo.scm_instance().get_commit()
278 278 except EmptyRepositoryError:
279 279 h.flash(h.literal(_('There are no commits yet')),
280 280 category='warning')
281 281 redirect(url('summary_home', repo_name=source_repo.repo_name))
282 282
283 283 commit_id = request.GET.get('commit')
284 284 branch_ref = request.GET.get('branch')
285 285 bookmark_ref = request.GET.get('bookmark')
286 286
287 287 try:
288 288 source_repo_data = PullRequestModel().generate_repo_data(
289 289 source_repo, commit_id=commit_id,
290 290 branch=branch_ref, bookmark=bookmark_ref)
291 291 except CommitDoesNotExistError as e:
292 292 log.exception(e)
293 293 h.flash(_('Commit does not exist'), 'error')
294 294 redirect(url('pullrequest_home', repo_name=source_repo.repo_name))
295 295
296 296 default_target_repo = source_repo
297 297 if (source_repo.parent and
298 298 not source_repo.parent.scm_instance().is_empty()):
299 299 # change default if we have a parent repo
300 300 default_target_repo = source_repo.parent
301 301
302 302 target_repo_data = PullRequestModel().generate_repo_data(
303 303 default_target_repo)
304 304
305 305 selected_source_ref = source_repo_data['refs']['selected_ref']
306 306
307 307 title_source_ref = selected_source_ref.split(':', 2)[1]
308 308 c.default_title = PullRequestModel().generate_pullrequest_title(
309 309 source=source_repo.repo_name,
310 310 source_ref=title_source_ref,
311 311 target=default_target_repo.repo_name
312 312 )
313 313
314 314 c.default_repo_data = {
315 315 'source_repo_name': source_repo.repo_name,
316 316 'source_refs_json': json.dumps(source_repo_data),
317 317 'target_repo_name': default_target_repo.repo_name,
318 318 'target_refs_json': json.dumps(target_repo_data),
319 319 }
320 320 c.default_source_ref = selected_source_ref
321 321
322 322 return render('/pullrequests/pullrequest.html')
323 323
324 324 @LoginRequired()
325 325 @NotAnonymous()
326 326 @XHRRequired()
327 327 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
328 328 'repository.admin')
329 329 @jsonify
330 330 def get_repo_refs(self, repo_name, target_repo_name):
331 331 repo = Repository.get_by_repo_name(target_repo_name)
332 332 if not repo:
333 333 raise HTTPNotFound
334 334 return PullRequestModel().generate_repo_data(repo)
335 335
336 336 @LoginRequired()
337 337 @NotAnonymous()
338 338 @XHRRequired()
339 339 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
340 340 'repository.admin')
341 341 @jsonify
342 342 def get_repo_destinations(self, repo_name):
343 343 repo = Repository.get_by_repo_name(repo_name)
344 344 if not repo:
345 345 raise HTTPNotFound
346 346 filter_query = request.GET.get('query')
347 347
348 348 query = Repository.query() \
349 349 .order_by(func.length(Repository.repo_name)) \
350 350 .filter(or_(
351 351 Repository.repo_name == repo.repo_name,
352 352 Repository.fork_id == repo.repo_id))
353 353
354 354 if filter_query:
355 355 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
356 356 query = query.filter(
357 357 Repository.repo_name.ilike(ilike_expression))
358 358
359 359 add_parent = False
360 360 if repo.parent:
361 361 if filter_query in repo.parent.repo_name:
362 362 if not repo.parent.scm_instance().is_empty():
363 363 add_parent = True
364 364
365 365 limit = 20 - 1 if add_parent else 20
366 366 all_repos = query.limit(limit).all()
367 367 if add_parent:
368 368 all_repos += [repo.parent]
369 369
370 370 repos = []
371 371 for obj in self.scm_model.get_repos(all_repos):
372 372 repos.append({
373 373 'id': obj['name'],
374 374 'text': obj['name'],
375 375 'type': 'repo',
376 376 'obj': obj['dbrepo']
377 377 })
378 378
379 379 data = {
380 380 'more': False,
381 381 'results': [{
382 382 'text': _('Repositories'),
383 383 'children': repos
384 384 }] if repos else []
385 385 }
386 386 return data
387 387
388 388 @LoginRequired()
389 389 @NotAnonymous()
390 390 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
391 391 'repository.admin')
392 392 @HasAcceptedRepoType('git', 'hg')
393 393 @auth.CSRFRequired()
394 394 def create(self, repo_name):
395 395 repo = Repository.get_by_repo_name(repo_name)
396 396 if not repo:
397 397 raise HTTPNotFound
398 398
399 399 try:
400 400 _form = PullRequestForm(repo.repo_id)().to_python(request.POST)
401 401 except formencode.Invalid as errors:
402 402 if errors.error_dict.get('revisions'):
403 403 msg = 'Revisions: %s' % errors.error_dict['revisions']
404 404 elif errors.error_dict.get('pullrequest_title'):
405 405 msg = _('Pull request requires a title with min. 3 chars')
406 406 else:
407 407 msg = _('Error creating pull request: {}').format(errors)
408 408 log.exception(msg)
409 409 h.flash(msg, 'error')
410 410
411 411 # would rather just go back to form ...
412 412 return redirect(url('pullrequest_home', repo_name=repo_name))
413 413
414 414 source_repo = _form['source_repo']
415 415 source_ref = _form['source_ref']
416 416 target_repo = _form['target_repo']
417 417 target_ref = _form['target_ref']
418 418 commit_ids = _form['revisions'][::-1]
419 419 reviewers = _form['review_members']
420 420
421 421 # find the ancestor for this pr
422 422 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
423 423 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
424 424
425 425 source_scm = source_db_repo.scm_instance()
426 426 target_scm = target_db_repo.scm_instance()
427 427
428 428 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
429 429 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
430 430
431 431 ancestor = source_scm.get_common_ancestor(
432 432 source_commit.raw_id, target_commit.raw_id, target_scm)
433 433
434 434 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
435 435 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
436 436
437 437 pullrequest_title = _form['pullrequest_title']
438 438 title_source_ref = source_ref.split(':', 2)[1]
439 439 if not pullrequest_title:
440 440 pullrequest_title = PullRequestModel().generate_pullrequest_title(
441 441 source=source_repo,
442 442 source_ref=title_source_ref,
443 443 target=target_repo
444 444 )
445 445
446 446 description = _form['pullrequest_desc']
447 447 try:
448 448 pull_request = PullRequestModel().create(
449 449 c.rhodecode_user.user_id, source_repo, source_ref, target_repo,
450 450 target_ref, commit_ids, reviewers, pullrequest_title,
451 451 description
452 452 )
453 453 Session().commit()
454 454 h.flash(_('Successfully opened new pull request'),
455 455 category='success')
456 456                 except Exception as e:
457 458 msg = _('Error occurred during sending pull request')
458 459 log.exception(msg)
459 460 h.flash(msg, category='error')
460 461 return redirect(url('pullrequest_home', repo_name=repo_name))
461 462
462 463 return redirect(url('pullrequest_show', repo_name=target_repo,
463 464 pull_request_id=pull_request.pull_request_id))
464 465
465 466 @LoginRequired()
466 467 @NotAnonymous()
467 468 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
468 469 'repository.admin')
469 470 @auth.CSRFRequired()
470 471 @jsonify
471 472 def update(self, repo_name, pull_request_id):
472 473 pull_request_id = safe_int(pull_request_id)
473 474 pull_request = PullRequest.get_or_404(pull_request_id)
474 475 # only owner or admin can update it
475 476 allowed_to_update = PullRequestModel().check_user_update(
476 477 pull_request, c.rhodecode_user)
477 478 if allowed_to_update:
478 479 if 'reviewers_ids' in request.POST:
479 480 self._update_reviewers(pull_request_id)
480 481 elif str2bool(request.POST.get('update_commits', 'false')):
481 482 self._update_commits(pull_request)
482 483 elif str2bool(request.POST.get('close_pull_request', 'false')):
483 484 self._reject_close(pull_request)
484 485 elif str2bool(request.POST.get('edit_pull_request', 'false')):
485 486 self._edit_pull_request(pull_request)
486 487 else:
487 488 raise HTTPBadRequest()
488 489 return True
489 490 raise HTTPForbidden()
490 491
491 492 def _edit_pull_request(self, pull_request):
492 493 try:
493 494 PullRequestModel().edit(
494 495 pull_request, request.POST.get('title'),
495 496 request.POST.get('description'))
496 497 except ValueError:
497 498 msg = _(u'Cannot update closed pull requests.')
498 499 h.flash(msg, category='error')
499 500 return
500 501 else:
501 502 Session().commit()
502 503
503 504 msg = _(u'Pull request title & description updated.')
504 505 h.flash(msg, category='success')
505 506 return
506 507
507 508 def _update_commits(self, pull_request):
508 509 try:
509 510 if PullRequestModel().has_valid_update_type(pull_request):
510 511 updated_version, changes = PullRequestModel().update_commits(
511 512 pull_request)
512 513 if updated_version:
513 514 msg = _(
514 515 u'Pull request updated to "{source_commit_id}" with '
515 516 u'{count_added} added, {count_removed} removed '
516 517 u'commits.'
517 518 ).format(
518 519 source_commit_id=pull_request.source_ref_parts.commit_id,
519 520 count_added=len(changes.added),
520 521 count_removed=len(changes.removed))
521 522 h.flash(msg, category='success')
522 523 else:
523 524 h.flash(_("Nothing changed in pull request."),
524 525 category='warning')
525 526 else:
526 527 msg = _(
527 528 u"Skipping update of pull request due to reference "
528 529 u"type: {reference_type}"
529 530 ).format(reference_type=pull_request.source_ref_parts.type)
530 531 h.flash(msg, category='warning')
531 532 except CommitDoesNotExistError:
532 533 h.flash(
533 534 _(u'Update failed due to missing commits.'), category='error')
534 535
535 536 @auth.CSRFRequired()
536 537 @LoginRequired()
537 538 @NotAnonymous()
538 539 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
539 540 'repository.admin')
540 541 def merge(self, repo_name, pull_request_id):
541 542 """
542 543 POST /{repo_name}/pull-request/{pull_request_id}
543 544
544 545 Merge will perform a server-side merge of the specified
545 546 pull request, if the pull request is approved and mergeable.
546 547 After succesfull merging, the pull request is automatically
547 548 closed, with a relevant comment.
548 549 """
549 550 pull_request_id = safe_int(pull_request_id)
550 551 pull_request = PullRequest.get_or_404(pull_request_id)
551 552 user = c.rhodecode_user
552 553
553 554 if self._meets_merge_pre_conditions(pull_request, user):
554 555 log.debug("Pre-conditions checked, trying to merge.")
555 556 extras = vcs_operation_context(
556 557 request.environ, repo_name=pull_request.target_repo.repo_name,
557 558 username=user.username, action='push',
558 559 scm=pull_request.target_repo.repo_type)
559 560 self._merge_pull_request(pull_request, user, extras)
560 561
561 562 return redirect(url(
562 563 'pullrequest_show',
563 564 repo_name=pull_request.target_repo.repo_name,
564 565 pull_request_id=pull_request.pull_request_id))
565 566
566 567 def _meets_merge_pre_conditions(self, pull_request, user):
567 568 if not PullRequestModel().check_user_merge(pull_request, user):
568 569 raise HTTPForbidden()
569 570
570 571 merge_status, msg = PullRequestModel().merge_status(pull_request)
571 572 if not merge_status:
572 573 log.debug("Cannot merge, not mergeable.")
573 574 h.flash(msg, category='error')
574 575 return False
575 576
576 577 if (pull_request.calculated_review_status()
577 578 is not ChangesetStatus.STATUS_APPROVED):
578 579 log.debug("Cannot merge, approval is pending.")
579 580 msg = _('Pull request reviewer approval is pending.')
580 581 h.flash(msg, category='error')
581 582 return False
582 583 return True
583 584
584 585 def _merge_pull_request(self, pull_request, user, extras):
585 586 merge_resp = PullRequestModel().merge(
586 587 pull_request, user, extras=extras)
587 588
588 589 if merge_resp.executed:
589 590 log.debug("The merge was successful, closing the pull request.")
590 591 PullRequestModel().close_pull_request(
591 592 pull_request.pull_request_id, user)
592 593 Session().commit()
593 594 msg = _('Pull request was successfully merged and closed.')
594 595 h.flash(msg, category='success')
595 596 else:
596 597 log.debug(
597 598 "The merge was not successful. Merge response: %s",
598 599 merge_resp)
599 600 msg = PullRequestModel().merge_status_message(
600 601 merge_resp.failure_reason)
601 602 h.flash(msg, category='error')
602 603
603 604 def _update_reviewers(self, pull_request_id):
604 605 reviewers_ids = map(int, filter(
605 606 lambda v: v not in [None, ''],
606 607 request.POST.get('reviewers_ids', '').split(',')))
607 608 PullRequestModel().update_reviewers(pull_request_id, reviewers_ids)
608 609 Session().commit()
609 610
610 611 def _reject_close(self, pull_request):
611 612 if pull_request.is_closed():
612 613 raise HTTPForbidden()
613 614
614 615 PullRequestModel().close_pull_request_with_comment(
615 616 pull_request, c.rhodecode_user, c.rhodecode_db_repo)
616 617 Session().commit()
617 618
618 619 @LoginRequired()
619 620 @NotAnonymous()
620 621 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
621 622 'repository.admin')
622 623 @auth.CSRFRequired()
623 624 @jsonify
624 625 def delete(self, repo_name, pull_request_id):
625 626 pull_request_id = safe_int(pull_request_id)
626 627 pull_request = PullRequest.get_or_404(pull_request_id)
627 628 # only owner can delete it !
628 629 if pull_request.author.user_id == c.rhodecode_user.user_id:
629 630 PullRequestModel().delete(pull_request)
630 631 Session().commit()
631 632 h.flash(_('Successfully deleted pull request'),
632 633 category='success')
633 634 return redirect(url('my_account_pullrequests'))
634 635 raise HTTPForbidden()
635 636
636 637 @LoginRequired()
637 638 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
638 639 'repository.admin')
639 640 def show(self, repo_name, pull_request_id):
640 641 pull_request_id = safe_int(pull_request_id)
641 642 c.pull_request = PullRequest.get_or_404(pull_request_id)
642 643
643 644 # pull_requests repo_name we opened it against
644 645 # ie. target_repo must match
645 646 if repo_name != c.pull_request.target_repo.repo_name:
646 647 raise HTTPNotFound
647 648
648 649 c.allowed_to_change_status = PullRequestModel(). \
649 650 check_user_change_status(c.pull_request, c.rhodecode_user)
650 651 c.allowed_to_update = PullRequestModel().check_user_update(
651 652 c.pull_request, c.rhodecode_user) and not c.pull_request.is_closed()
652 653 c.allowed_to_merge = PullRequestModel().check_user_merge(
653 654 c.pull_request, c.rhodecode_user) and not c.pull_request.is_closed()
654 655
655 656 cc_model = ChangesetCommentsModel()
656 657
657 658 c.pull_request_reviewers = c.pull_request.reviewers_statuses()
658 659
659 660 c.pull_request_review_status = c.pull_request.calculated_review_status()
660 661 c.pr_merge_status, c.pr_merge_msg = PullRequestModel().merge_status(
661 662 c.pull_request)
662 663 c.approval_msg = None
663 664 if c.pull_request_review_status != ChangesetStatus.STATUS_APPROVED:
664 665 c.approval_msg = _('Reviewer approval is pending.')
665 666 c.pr_merge_status = False
666 667 # load compare data into template context
667 668 enable_comments = not c.pull_request.is_closed()
668 669 self._load_compare_data(c.pull_request, enable_comments=enable_comments)
669 670
670 671 # this is a hack to properly display links, when creating PR, the
671 672 # compare view and others uses different notation, and
672 673 # compare_commits.html renders links based on the target_repo.
673 674 # We need to swap that here to generate it properly on the html side
674 675 c.target_repo = c.source_repo
675 676
676 677 # inline comments
677 678 c.inline_cnt = 0
678 679 c.inline_comments = cc_model.get_inline_comments(
679 680 c.rhodecode_db_repo.repo_id,
680 681 pull_request=pull_request_id).items()
681 682 # count inline comments
682 683 for __, lines in c.inline_comments:
683 684 for comments in lines.values():
684 685 c.inline_cnt += len(comments)
685 686
686 687 # outdated comments
687 688 c.outdated_cnt = 0
688 689 if ChangesetCommentsModel.use_outdated_comments(c.pull_request):
689 690 c.outdated_comments = cc_model.get_outdated_comments(
690 691 c.rhodecode_db_repo.repo_id,
691 692 pull_request=c.pull_request)
692 693 # Count outdated comments and check for deleted files
693 694 for file_name, lines in c.outdated_comments.iteritems():
694 695 for comments in lines.values():
695 696 c.outdated_cnt += len(comments)
696 697 if file_name not in c.included_files:
697 698 c.deleted_files.append(file_name)
698 699 else:
699 700 c.outdated_comments = {}
700 701
701 702 # comments
702 703 c.comments = cc_model.get_comments(c.rhodecode_db_repo.repo_id,
703 704 pull_request=pull_request_id)
704 705
705 706 if c.allowed_to_update:
706 707 force_close = ('forced_closed', _('Close Pull Request'))
707 708 statuses = ChangesetStatus.STATUSES + [force_close]
708 709 else:
709 710 statuses = ChangesetStatus.STATUSES
710 711 c.commit_statuses = statuses
711 712
712 713 c.ancestor = None # TODO: add ancestor here
713 714
714 715 return render('/pullrequests/pullrequest_show.html')
715 716
716 717 @LoginRequired()
717 718 @NotAnonymous()
718 719 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
719 720 'repository.admin')
720 721 @auth.CSRFRequired()
721 722 @jsonify
722 723 def comment(self, repo_name, pull_request_id):
723 724 pull_request_id = safe_int(pull_request_id)
724 725 pull_request = PullRequest.get_or_404(pull_request_id)
725 726 if pull_request.is_closed():
726 727 raise HTTPForbidden()
727 728
728 729 # TODO: johbo: Re-think this bit, "approved_closed" does not exist
729 730 # as a changeset status, still we want to send it in one value.
730 731 status = request.POST.get('changeset_status', None)
731 732 text = request.POST.get('text')
732 733 if status and '_closed' in status:
733 734 close_pr = True
734 735 status = status.replace('_closed', '')
735 736 else:
736 737 close_pr = False
737 738
738 739 forced = (status == 'forced')
739 740 if forced:
740 741 status = 'rejected'
741 742
742 743 allowed_to_change_status = PullRequestModel().check_user_change_status(
743 744 pull_request, c.rhodecode_user)
744 745
745 746 if status and allowed_to_change_status:
746 747 message = (_('Status change %(transition_icon)s %(status)s')
747 748 % {'transition_icon': '>',
748 749 'status': ChangesetStatus.get_status_lbl(status)})
749 750 if close_pr:
750 751 message = _('Closing with') + ' ' + message
751 752 text = text or message
752 753 comm = ChangesetCommentsModel().create(
753 754 text=text,
754 755 repo=c.rhodecode_db_repo.repo_id,
755 756 user=c.rhodecode_user.user_id,
756 757 pull_request=pull_request_id,
757 758 f_path=request.POST.get('f_path'),
758 759 line_no=request.POST.get('line'),
759 760 status_change=(ChangesetStatus.get_status_lbl(status)
760 761 if status and allowed_to_change_status else None),
761 762 closing_pr=close_pr
762 763 )
763 764
764 765 if allowed_to_change_status:
765 766 old_calculated_status = pull_request.calculated_review_status()
766 767 # get status if set !
767 768 if status:
768 769 ChangesetStatusModel().set_status(
769 770 c.rhodecode_db_repo.repo_id,
770 771 status,
771 772 c.rhodecode_user.user_id,
772 773 comm,
773 774 pull_request=pull_request_id
774 775 )
775 776
776 777 Session().flush()
777 778 # we now calculate the status of pull request, and based on that
778 779 # calculation we set the commits status
779 780 calculated_status = pull_request.calculated_review_status()
780 781 if old_calculated_status != calculated_status:
781 782 PullRequestModel()._trigger_pull_request_hook(
782 783 pull_request, c.rhodecode_user, 'review_status_change')
783 784
784 785 calculated_status_lbl = ChangesetStatus.get_status_lbl(
785 786 calculated_status)
786 787
787 788 if close_pr:
788 789 status_completed = (
789 790 calculated_status in [ChangesetStatus.STATUS_APPROVED,
790 791 ChangesetStatus.STATUS_REJECTED])
791 792 if forced or status_completed:
792 793 PullRequestModel().close_pull_request(
793 794 pull_request_id, c.rhodecode_user)
794 795 else:
795 796 h.flash(_('Closing pull request on other statuses than '
796 797 'rejected or approved is forbidden. '
797 798 'Calculated status from all reviewers '
798 799 'is currently: %s') % calculated_status_lbl,
799 800 category='warning')
800 801
801 802 Session().commit()
802 803
803 804 if not request.is_xhr:
804 805 return redirect(h.url('pullrequest_show', repo_name=repo_name,
805 806 pull_request_id=pull_request_id))
806 807
807 808 data = {
808 809 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
809 810 }
810 811 if comm:
811 812 c.co = comm
812 813 data.update(comm.get_dict())
813 814 data.update({'rendered_text':
814 815 render('changeset/changeset_comment_block.html')})
815 816
816 817 return data
817 818
818 819 @LoginRequired()
819 820 @NotAnonymous()
820 821 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
821 822 'repository.admin')
822 823 @auth.CSRFRequired()
823 824 @jsonify
824 825 def delete_comment(self, repo_name, comment_id):
825 826 return self._delete_comment(comment_id)
826 827
827 828 def _delete_comment(self, comment_id):
828 829 comment_id = safe_int(comment_id)
829 830 co = ChangesetComment.get_or_404(comment_id)
830 831 if co.pull_request.is_closed():
831 832 # don't allow deleting comments on closed pull request
832 833 raise HTTPForbidden()
833 834
834 835 is_owner = co.author.user_id == c.rhodecode_user.user_id
835 836 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
836 837 if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner:
837 838 old_calculated_status = co.pull_request.calculated_review_status()
838 839 ChangesetCommentsModel().delete(comment=co)
839 840 Session().commit()
840 841 calculated_status = co.pull_request.calculated_review_status()
841 842 if old_calculated_status != calculated_status:
842 843 PullRequestModel()._trigger_pull_request_hook(
843 844 co.pull_request, c.rhodecode_user, 'review_status_change')
844 845 return True
845 846 else:
846 847 raise HTTPForbidden()
@@ -1,59 +1,57 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from pyramid.threadlocal import get_current_registry
20 20
21 21
22 class RhodecodeEvent(object):
23 """
24 Base event class for all Rhodecode events
25 """
26
27
28 22 def trigger(event):
29 23 """
30 24 Helper method to send an event. This wraps the pyramid logic to send an
31 25 event.
32 26 """
33 27 # For the first step we are using pyramids thread locals here. If the
34 28 # event mechanism works out as a good solution we should think about
35 29 # passing the registry as an argument to get rid of it.
36 30 registry = get_current_registry()
37 31 registry.notify(event)
38 32
39 33
34 from rhodecode.events.base import RhodecodeEvent
35
40 36 from rhodecode.events.user import (
41 37 UserPreCreate,
42 38 UserPreUpdate,
43 39 UserRegistered
44 40 )
45 41
46 42 from rhodecode.events.repo import (
43 RepoEvent,
47 44 RepoPreCreateEvent, RepoCreatedEvent,
48 45 RepoPreDeleteEvent, RepoDeletedEvent,
49 46 RepoPrePushEvent, RepoPushEvent,
50 47 RepoPrePullEvent, RepoPullEvent,
51 48 )
52 49
53 50 from rhodecode.events.pullrequest import (
51 PullRequestEvent,
54 52 PullRequestCreateEvent,
55 53 PullRequestUpdateEvent,
56 54 PullRequestReviewEvent,
57 55 PullRequestMergeEvent,
58 56 PullRequestCloseEvent,
59 57 ) No newline at end of file
@@ -1,72 +1,97 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 from marshmallow import Schema, fields
20
19 21 from rhodecode.events.repo import RepoEvent
20 22
21 23
24 def get_pull_request_url(pull_request):
25 from rhodecode.model.pull_request import PullRequestModel
26 return PullRequestModel().get_url(pull_request)
27
28
29 class PullRequestSchema(Schema):
30 """
31 Marshmallow schema for a pull request
32 """
33 pull_request_id = fields.Integer()
34 url = fields.Function(get_pull_request_url)
35 title = fields.Str()
36
37
38 class PullRequestEventSchema(RepoEvent.MarshmallowSchema):
39 """
40 Marshmallow schema for a pull request event
41 """
42 pullrequest = fields.Nested(PullRequestSchema)
43
44
22 45 class PullRequestEvent(RepoEvent):
23 46 """
24 Base class for events acting on a repository.
47 Base class for pull request events.
25 48
26 :param repo: a :class:`Repository` instance
49 :param pullrequest: a :class:`PullRequest` instance
27 50 """
51 MarshmallowSchema = PullRequestEventSchema
52
28 53 def __init__(self, pullrequest):
29 54 super(PullRequestEvent, self).__init__(pullrequest.target_repo)
30 55 self.pullrequest = pullrequest
31 56
32 57
33 58 class PullRequestCreateEvent(PullRequestEvent):
34 59 """
35 60 An instance of this class is emitted as an :term:`event` after a pull
36 61 request is created.
37 62 """
38 63 name = 'pullrequest-create'
39 64
40 65
41 66 class PullRequestCloseEvent(PullRequestEvent):
42 67 """
43 68 An instance of this class is emitted as an :term:`event` after a pull
44 69 request is closed.
45 70 """
46 71 name = 'pullrequest-close'
47 72
48 73
49 74 class PullRequestUpdateEvent(PullRequestEvent):
50 75 """
51 76 An instance of this class is emitted as an :term:`event` after a pull
52 77 request is updated.
53 78 """
54 79 name = 'pullrequest-update'
55 80
56 81
57 82 class PullRequestMergeEvent(PullRequestEvent):
58 83 """
59 84 An instance of this class is emitted as an :term:`event` after a pull
60 85 request is merged.
61 86 """
62 87 name = 'pullrequest-merge'
63 88
64 89
65 90 class PullRequestReviewEvent(PullRequestEvent):
66 91 """
67 92 An instance of this class is emitted as an :term:`event` after a pull
68 93 request is reviewed.
69 94 """
70 95 name = 'pullrequest-review'
71 96
72 97
@@ -1,113 +1,149 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 from marshmallow import Schema, fields
20
19 21 from rhodecode.model.db import User, Repository, Session
20 from rhodecode.events import RhodecodeEvent
22 from rhodecode.events.base import RhodecodeEvent
23
24
25 def get_repo_url(repo):
26 from rhodecode.model.repo import RepoModel
27 return RepoModel().get_url(repo)
28
29
30 class RepositorySchema(Schema):
31 """
32 Marshmallow schema for a repository
33 """
34 repo_id = fields.Integer()
35 repo_name = fields.Str()
36 url = fields.Function(get_repo_url)
37
38
39 class RepoEventSchema(RhodecodeEvent.MarshmallowSchema):
40 """
41 Marshmallow schema for a repository event
42 """
43 repository = fields.Nested(RepositorySchema)
21 44
22 45
23 46 class RepoEvent(RhodecodeEvent):
24 47 """
25 48 Base class for events acting on a repository.
26 49
27 50 :param repo: a :class:`Repository` instance
28 51 """
52 MarshmallowSchema = RepoEventSchema
53
29 54 def __init__(self, repo):
55 super(RepoEvent, self).__init__()
30 56 self.repo = repo
31 57
32 58
33 59 class RepoPreCreateEvent(RepoEvent):
34 60 """
35 61 An instance of this class is emitted as an :term:`event` before a repo is
36 62 created.
37 63 """
38 64 name = 'repo-pre-create'
39 65
40 66
41 67 class RepoCreatedEvent(RepoEvent):
42 68 """
43 69 An instance of this class is emitted as an :term:`event` whenever a repo is
44 70 created.
45 71 """
46 72 name = 'repo-created'
47 73
48 74
49 75 class RepoPreDeleteEvent(RepoEvent):
50 76 """
51 77 An instance of this class is emitted as an :term:`event` whenever a repo is
52 78 created.
53 79 """
54 80 name = 'repo-pre-delete'
55 81
56 82
57 83 class RepoDeletedEvent(RepoEvent):
58 84 """
59 85 An instance of this class is emitted as an :term:`event` whenever a repo is
60 86 created.
61 87 """
62 88 name = 'repo-deleted'
63 89
64 90
65 91 class RepoVCSEvent(RepoEvent):
66 92 """
67 93 Base class for events triggered by the VCS
68 94 """
69 95 def __init__(self, repo_name, extras):
70 96 self.repo = Repository.get_by_repo_name(repo_name)
71 97 if not self.repo:
72 98 raise Exception('repo by this name %s does not exist' % repo_name)
73 99 self.extras = extras
74 100 super(RepoVCSEvent, self).__init__(self.repo)
75 101
102 @property
103 def acting_user(self):
104 if self.extras.get('username'):
105 return User.get_by_username(self.extras['username'])
106
107 @property
108 def acting_ip(self):
109 if self.extras.get('ip'):
110 return self.extras['ip']
111
76 112
77 113 class RepoPrePullEvent(RepoVCSEvent):
78 114 """
79 115 An instance of this class is emitted as an :term:`event` before commits
80 116 are pulled from a repo.
81 117 """
82 118 name = 'repo-pre-pull'
83 119
84 120
85 121 class RepoPullEvent(RepoVCSEvent):
86 122 """
87 123 An instance of this class is emitted as an :term:`event` after commits
88 124 are pulled from a repo.
89 125 """
90 126 name = 'repo-pull'
91 127
92 128
93 129 class RepoPrePushEvent(RepoVCSEvent):
94 130 """
95 131 An instance of this class is emitted as an :term:`event` before commits
96 132 are pushed to a repo.
97 133 """
98 134 name = 'repo-pre-push'
99 135
100 136
101 137 class RepoPushEvent(RepoVCSEvent):
102 138 """
103 139 An instance of this class is emitted as an :term:`event` after commits
104 140 are pushed to a repo.
105 141
106 142 :param extras: (optional) dict of data from proxied VCS actions
107 143 """
108 144 name = 'repo-push'
109 145
110 146 def __init__(self, repo_name, pushed_commit_ids, extras):
111 147 super(RepoPushEvent, self).__init__(repo_name, extras)
112 148 self.pushed_commit_ids = pushed_commit_ids
113 149
@@ -1,54 +1,55 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from zope.interface import implementer
20 from rhodecode.events import RhodecodeEvent
20
21 from rhodecode.events.base import RhodecodeEvent
21 22 from rhodecode.events.interfaces import (
22 23 IUserRegistered, IUserPreCreate, IUserPreUpdate)
23 24
24 25
25 26 @implementer(IUserRegistered)
26 27 class UserRegistered(RhodecodeEvent):
27 28 """
28 29 An instance of this class is emitted as an :term:`event` whenever a user
29 30 account is registered.
30 31 """
31 32 def __init__(self, user, session):
32 33 self.user = user
33 34 self.session = session
34 35
35 36
36 37 @implementer(IUserPreCreate)
37 38 class UserPreCreate(RhodecodeEvent):
38 39 """
39 40 An instance of this class is emitted as an :term:`event` before a new user
40 41 object is created.
41 42 """
42 43 def __init__(self, user_data):
43 44 self.user_data = user_data
44 45
45 46
46 47 @implementer(IUserPreUpdate)
47 48 class UserPreUpdate(RhodecodeEvent):
48 49 """
49 50 An instance of this class is emitted as an :term:`event` before a user
50 51 object is updated.
51 52 """
52 53 def __init__(self, user, user_data):
53 54 self.user = user
54 55 self.user_data = user_data
@@ -1,3477 +1,3477 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import os
26 26 import sys
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37
38 38 from sqlalchemy import *
39 39 from sqlalchemy.exc import IntegrityError
40 40 from sqlalchemy.ext.declarative import declared_attr
41 41 from sqlalchemy.ext.hybrid import hybrid_property
42 42 from sqlalchemy.orm import (
43 43 relationship, joinedload, class_mapper, validates, aliased)
44 44 from sqlalchemy.sql.expression import true
45 45 from beaker.cache import cache_region, region_invalidate
46 46 from webob.exc import HTTPNotFound
47 47 from zope.cachedescriptors.property import Lazy as LazyProperty
48 48
49 49 from pylons import url
50 50 from pylons.i18n.translation import lazy_ugettext as _
51 51
52 52 from rhodecode.lib.vcs import get_backend
53 53 from rhodecode.lib.vcs.utils.helpers import get_scm
54 54 from rhodecode.lib.vcs.exceptions import VCSError
55 55 from rhodecode.lib.vcs.backends.base import (
56 56 EmptyCommit, Reference, MergeFailureReason)
57 57 from rhodecode.lib.utils2 import (
58 58 str2bool, safe_str, get_commit_safe, safe_unicode, remove_prefix, md5_safe,
59 59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict)
60 60 from rhodecode.lib.ext_json import json
61 61 from rhodecode.lib.caching_query import FromCache
62 62 from rhodecode.lib.encrypt import AESCipher
63 63
64 64 from rhodecode.model.meta import Base, Session
65 65
# separator used when composing repository / group paths into URLs
URL_SEP = '/'
log = logging.getLogger(__name__)

# =============================================================================
# BASE CLASSES
# =============================================================================

# this is propagated from .ini file rhodecode.encrypted_values.secret or
# beaker.session.secret if first is not set.
# and initialized at environment.py
ENCRYPTION_KEY = None

# used to sort permissions by types, '#' used here is not allowed to be in
# usernames, and it's very early in sorted string.printable table.
PERMISSION_TYPE_SORT = {
    'admin': '####',
    'write': '###',
    'read': '##',
    'none': '#',
}
86 86
87 87
def display_sort(obj):
    """
    Sort function used to sort permissions in .permissions() function of
    Repository, RepoGroup, UserGroup. Also it put the default user in front
    of all other resources
    """
    # the built-in default user always sorts first
    if obj.username == User.DEFAULT_USER:
        return '#####'
    # order by permission level (admin/write/read/none), then by username
    perm_level = obj.permission.rsplit('.', 1)[-1]
    return PERMISSION_TYPE_SORT.get(perm_level, '') + obj.username
99 99
100 100
def _hash_key(k):
    """Return a safe md5 digest of *k*, used to build short cache keys."""
    return md5_safe(k)
103 103
104 104
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts
    """
    impl = Text

    def process_bind_param(self, value, dialect):
        # Called on the way INTO the database: always store encrypted.
        # Empty/None values pass through unchanged.
        if not value:
            return value
        if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
            # protect against double encrypting if someone manually starts
            # doing
            raise ValueError('value needs to be in unencrypted format, ie. '
                             'not starting with enc$aes')
        # always write the HMAC-protected envelope: 'enc$aes_hmac$<data>'
        return 'enc$aes_hmac$%s' % AESCipher(
            ENCRYPTION_KEY, hmac=True).encrypt(value)

    def process_result_value(self, value, dialect):
        # Called on the way OUT of the database: decrypt values matching the
        # 'enc$<type>$<data>' envelope, return anything else unchanged.
        import rhodecode

        if not value:
            return value

        parts = value.split('$', 3)
        if not len(parts) == 3:
            # probably not encrypted values
            return value
        else:
            if parts[0] != 'enc':
                # parts ok but without our header ?
                return value
            # strict mode (default on) makes HMAC verification failures raise
            enc_strict_mode = str2bool(rhodecode.CONFIG.get(
                'rhodecode.encrypted_values.strict') or True)
            # at that stage we know it's our encryption
            if parts[1] == 'aes':
                decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
            elif parts[1] == 'aes_hmac':
                decrypted_data = AESCipher(
                    ENCRYPTION_KEY, hmac=True,
                    strict_verification=enc_strict_mode).decrypt(parts[2])
            else:
                raise ValueError(
                    'Encryption type part is wrong, must be `aes` '
                    'or `aes_hmac`, got `%s` instead' % (parts[1]))
            return decrypted_data
156 156
class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        l = []
        for k in self._get_keys():
            l.append((k, getattr(self, k),))
        return l

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        # only keys matching mapped columns are applied; extras are ignored
        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        """Return a new SQLAlchemy query object for this model."""
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        """Fetch instance by primary key; returns None for falsy id_."""
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch instance by primary key or raise HTTPNotFound."""
        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        """Return all rows of this model."""
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        """Mark the instance with the given primary key as deleted in the
        session; the caller is responsible for committing."""
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        """Look up an instance already present in the session identity map
        by attribute value, avoiding a database round-trip. Returns None on
        a miss or when multiple matches are found (which is also logged)."""
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
259 259
260 260
class RhodeCodeSetting(Base, BaseModel):
    """
    Global application setting stored as a typed key/value pair. The type
    suffix (e.g. 'unicode.encrypted') controls value conversion and optional
    transparent encryption.
    """
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    # converters applied when reading app_settings_value, keyed by base type
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # the setter below always converts to unicode; enforce that invariant
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            # base type is the part before an optional '.encrypted' suffix
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_unicode(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_unicode(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # validate against the base type part only ('unicode.encrypted' -> 'unicode')
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
340 340
341 341
class RhodeCodeUi(Base, BaseModel):
    """
    Global VCS ui-style configuration entries (section/key/value), mirroring
    hg-style config; also holds the well-known hook key constants below.
    """
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)
379 379
380 380
class RepoRhodeCodeSetting(Base, BaseModel):
    """
    Per-repository override of a RhodeCodeSetting: same typed key/value
    semantics, scoped to one repository. Unlike the global variant, values
    here are never encrypted.
    """
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # the setter below always converts to unicode; enforce that invariant
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        # convert the stored unicode value back to its declared type
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # note: no '.encrypted' suffix support here, exact match required
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
455 455
456 456
class RepoRhodeCodeUi(Base, BaseModel):
    """
    Per-repository override of a RhodeCodeUi entry: a VCS ui-style
    (section/key/value) setting scoped to a single repository.
    """
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
488 488
489 489
class User(Base, BaseModel):
    """
    A RhodeCode user account, including authentication data, profile info
    and relationships to owned/followed resources.
    """
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    DEFAULT_USER = 'default'
    DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    password = Column("password", String(255), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    name = Column("firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data

    user_log = relationship('UserLog')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')

    repositories = relationship('Repository')
    repository_groups = relationship('RepoGroup')
    user_groups = relationship('UserGroup')

    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')

    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
    user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all')
    user_ip_map = relationship('UserIpMap', cascade='all')
    user_auth_tokens = relationship('UserApiKeys', cascade='all')
    # gists
    user_gists = relationship('Gist', cascade='all')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all')
    # external identities
    extenal_identities = relationship(
        'ExternalIdentity',
        primaryjoin="User.user_id==ExternalIdentity.local_user_id",
        cascade='all')

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.user_id, self.username)

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are stored lowercased for case-insensitive matching
        self._email = val.lower() if val else None

    @property
    def firstname(self):
        # alias for future
        return self.name

    @property
    def emails(self):
        """Primary email plus any alternate emails from UserEmailMap."""
        other = UserEmailMap.query().filter(UserEmailMap.user == self).all()
        return [self.email] + [x.email for x in other]

    @property
    def auth_tokens(self):
        """Primary api_key plus all extra auth tokens."""
        return [self.api_key] + [x.api_key for x in self.extra_auth_tokens]

    @property
    def extra_auth_tokens(self):
        return UserApiKeys.query().filter(UserApiKeys.user == self).all()

    @property
    def feed_token(self):
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
            .all()
        if feed_tokens:
            return feed_tokens[0].api_key
        else:
            # use the main token so we don't end up with nothing...
            return self.api_key

    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        """Return non-expired extra tokens of *user*, optionally
        restricted to *role* (ROLE_ALL tokens always match)."""
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()

    @property
    def ip_addresses(self):
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
        return [x.ip_addr for x in ret]

    @property
    def username_and_name(self):
        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)

    @property
    def username_or_name_or_email(self):
        # NOTE: was `is not ' '` (identity compare against a literal, always
        # True in practice); fixed to a value comparison so an "empty"
        # full name of just a space is really treated as missing
        full_name = self.full_name if self.full_name != ' ' else None
        return self.username or full_name or self.email

    @property
    def full_name(self):
        return '%s %s' % (self.firstname, self.lastname)

    @property
    def full_name_or_username(self):
        return ('%s %s' % (self.firstname, self.lastname)
                if (self.firstname and self.lastname) else self.username)

    @property
    def full_contact(self):
        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)

    @property
    def short_contact(self):
        return '%s %s' % (self.firstname, self.lastname)

    @property
    def is_admin(self):
        return self.admin

    @property
    def AuthUser(self):
        """
        Returns instance of AuthUser for this user
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, api_key=self.api_key,
                        username=self.username)

    @hybrid_property
    def user_data(self):
        # _user_data holds a JSON blob; decode defensively and fall back to {}
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data)
        except TypeError:
            return {}

    @user_data.setter
    def user_data(self, val):
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        """
        Fetch a user by username.

        :param case_insensitive: compare usernames lowercased
        :param cache: use the short SQL cache region
        :param identity_cache: try the session identity map first
        """
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                q = q.options(
                    FromCache("sql_cache_short",
                              "get_user_by_name_%s" % _hash_key(username)))

        return q.scalar()

    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False, fallback=True):
        """Fetch a user by primary api_key; with *fallback*, also try the
        non-expired extra tokens in UserApiKeys."""
        q = cls.query().filter(cls.api_key == auth_token)

        if cache:
            q = q.options(FromCache("sql_cache_short",
                                    "get_auth_token_%s" % auth_token))
        res = q.scalar()

        if fallback and not res:
            # fallback to additional keys
            _res = UserApiKeys.query()\
                .filter(UserApiKeys.api_key == auth_token)\
                .filter(or_(UserApiKeys.expires == -1,
                            UserApiKeys.expires >= time.time()))\
                .first()
            if _res:
                res = _res.user
        return res

    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """Fetch a user by primary email, falling back to the alternate
        emails stored in UserEmailMap."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        if cache:
            q = q.options(FromCache("sql_cache_short",
                                    "get_email_key_%s" % email))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(FromCache("sql_cache_short",
                                        "get_email_map_key_%s" % email))
            ret = getattr(q.scalar(), 'user', None)

        return ret

    @classmethod
    def get_from_cs_author(cls, author):
        """
        Tries to get User objects out of commit author string

        :param author:
        """
        from rhodecode.lib.helpers import email, author_name
        # Valid email in the attribute passed, see if they're in the system
        _email = email(author)
        if _email:
            user = cls.get_by_email(_email, case_insensitive=True)
            if user:
                return user
        # Maybe we can match by username?
        _author = author_name(author)
        user = cls.get_by_username(_author, case_insensitive=True)
        if user:
            return user

    def update_userdata(self, **kwargs):
        """Merge *kwargs* into the user_data JSON blob."""
        usr = self
        old = usr.user_data
        old.update(**kwargs)
        usr.user_data = old
        Session().add(usr)
        # fixed: previous call passed kwargs as an unused positional
        # argument without a %s placeholder, so it was never logged
        log.debug('updated userdata with %s', kwargs)

    def update_lastlogin(self):
        """Update user lastlogin"""
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)

    def update_lastactivity(self):
        """Update user lastactivity"""
        usr = self
        old = usr.user_data
        old.update({'last_activity': time.time()})
        usr.user_data = old
        Session().add(usr)
        log.debug('updated user %s lastactivity', usr.username)

    def update_password(self, new_password, change_api_key=False):
        """Store a new (hashed) password; optionally rotate the api_key."""
        from rhodecode.lib.auth import get_crypt_password, generate_auth_token

        self.password = get_crypt_password(new_password)
        if change_api_key:
            self.api_key = generate_auth_token(self.username)
        Session().add(self)

    @classmethod
    def get_first_super_admin(cls):
        user = User.query().filter(User.admin == true()).first()
        if user is None:
            raise Exception('FATAL: Missing administrative account!')
        return user

    @classmethod
    def get_all_super_admins(cls):
        """
        Returns all admin accounts sorted by username
        """
        return User.query().filter(User.admin == true())\
            .order_by(User.username.asc()).all()

    @classmethod
    def get_default_user(cls, cache=False):
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
        if user is None:
            raise Exception('FATAL: Missing default account!')
        return user

    def _get_default_perms(self, user, suffix=''):
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, include_secrets=False, details='full'):
        """
        Common function for generating user related data for API

        :param include_secrets: By default secrets in the API data will be replaced
        by a placeholder value to prevent exposing this data by accident. In case
        this data shall be exposed, set this flag to ``True``.

        :param details: details can be 'basic|full' basic gives only a subset of
        the available user information that includes user_id, name and emails.
        """
        user = self
        user_data = self.user_data
        data = {
            'user_id': user.user_id,
            'username': user.username,
            'firstname': user.name,
            'lastname': user.lastname,
            'email': user.email,
            'emails': user.emails,
        }
        if details == 'basic':
            return data

        api_key_length = 40
        api_key_replacement = '*' * api_key_length

        extras = {
            'api_key': api_key_replacement,
            'api_keys': [api_key_replacement],
            'active': user.active,
            'admin': user.admin,
            'extern_type': user.extern_type,
            'extern_name': user.extern_name,
            'last_login': user.last_login,
            'ip_addresses': user.ip_addresses,
            'language': user_data.get('language')
        }
        data.update(extras)

        if include_secrets:
            data['api_key'] = user.api_key
            data['api_keys'] = user.auth_tokens
        return data

    def __json__(self):
        data = {
            'full_name': self.full_name,
            'full_name_or_username': self.full_name_or_username,
            'short_contact': self.short_contact,
            'full_contact': self.full_contact,
        }
        data.update(self.get_api_data())
        return data
875 875
876 876
class UserApiKeys(Base, BaseModel):
    """
    Extra authentication tokens of a user, each with an optional expiry
    timestamp and a role restricting what the token may be used for.
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        UniqueConstraint('api_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_HTTP = 'token_role_http'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_FEED = 'token_role_feed'
    ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # unix timestamp; -1 means the token never expires
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    user = relationship('User', lazy='joined')

    @classmethod
    def _get_role_name(cls, role):
        # translate a role constant to a human readable label; unknown
        # roles are returned unchanged
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
        }.get(role, role)

    @property
    def expired(self):
        # -1 is the sentinel for "never expires"
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)
925 925
926 926
class UserEmailMap(Base, BaseModel):
    """
    Extra email address attached to a user account.

    Addresses are globally unique and must not collide with any user's
    primary email stored on the ``users`` table.
    """
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        """
        Reject addresses already used as a primary user email.

        :raises AttributeError: when the address exists in the users table
        """
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # fixed grammar of the error message ("is present is" -> "in")
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # normalize to lowercase; falsy values are stored as NULL
        self._email = val.lower() if val else None
957 957
958 958
class UserIpMap(Base, BaseModel):
    """
    IP address / network allow-list entry bound to a user.
    """
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    # single address or CIDR network stored as text (parsed by ipaddress)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @classmethod
    def _get_ip_range(cls, ip_addr):
        # expand an address/CIDR into its [first, last] address strings;
        # strict=False accepts networks with host bits set
        net = ipaddress.ip_network(ip_addr, strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        # JSON form: raw value plus the expanded address range
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        # <UserIpMap('user_id:1=>127.0.0.1')>
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
989 989
class UserLog(Base, BaseModel):
    """
    Audit-journal entry for a user action.

    Username and repository name are stored next to the (nullable)
    foreign keys, so the textual record does not depend on the
    referenced rows.
    """
    __tablename__ = 'user_logs'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    def __unicode__(self):
        # <UserLog('id:repo_name:action')>
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.repository_name,
                                      self.action)

    @property
    def action_as_day(self):
        # action timestamp truncated to day precision (datetime.date)
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1016 1016
1017 1017
class UserGroup(Base, BaseModel):
    """
    Named group of users; permissions can be granted to the group as a
    whole instead of to each member individually.
    """
    __tablename__ = 'users_groups'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user = relationship('User')

    @hybrid_property
    def group_data(self):
        # JSON-backed free-form data; empty dict when unset or unparsable
        if not self._group_data:
            return {}

        try:
            return json.loads(self._group_data)
        except TypeError:
            return {}

    @group_data.setter
    def group_data(self, val):
        # best-effort serialization; failures are logged, not raised
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    def __unicode__(self):
        # <UserGroup('id:1:devs')>
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """
        Fetch a user group by name.

        :param cache: use the short sql cache region for the query
        :param case_insensitive: compare names via ``lower()``
        """
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(FromCache(
                "sql_cache_short",
                "get_group_%s" % _hash_key(group_name)))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        # primary-key lookup, optionally through the short sql cache
        user_group = cls.query()
        if cache:
            user_group = user_group.options(FromCache("sql_cache_short",
                                                      "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True):
        """
        Permission rows for this user group: explicit per-user grants,
        optionally preceded by super-admin rows and the owner row.

        :param with_admins: include rows for all super admins
        :param with_owner: include a row for the group owner
        :return: list of AttributeDict rows carrying ``permission``
        """
        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_sort)

        _admin_perm = 'usergroup.admin'
        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                # if this admin is also owner, don't double the record.
                # Guard against empty owner_row (with_owner=False), which
                # previously raised IndexError here.
                if owner_row and usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        return super_admin_rows + owner_row + perm_rows

    def permission_user_groups(self):
        # user-group -> user-group permission grants that target this group
        q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            usr = AttributeDict(_user_group.user_group.get_dict())
            usr.permission = _user_group.permission.permission_name
            perm_rows.append(usr)

        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        # delegate to the permission model (local import avoids a cycle)
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        Common function for generating user group related data for API.

        :param with_group_members: include API data of all member users
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.
        """
        user_group = self

        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
        }
        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
1176 1176
1177 1177
class UserGroupMember(Base, BaseModel):
    """
    Association row linking a user to a user group.
    """
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # NOTE(review): defaults are empty strings rather than None --
        # presumably historical; confirm before changing
        self.users_group_id = gr_id
        self.user_id = u_id
1196 1196
class RepositoryField(Base, BaseModel):
    """
    User-defined extra metadata field attached to a repository.

    Field keys are exposed in forms with the ``PREFIX`` so they cannot
    clash with built-in repository attributes.
    """
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        # consistency fix: build from PREFIX instead of a hard-coded 'ex_'
        return '%s%s' % (self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        # strip the form PREFIX when present; other keys pass through
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        # single-row lookup by (repository, un-prefixed field key)
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1233 1233
1234 1234
1235 1235 class Repository(Base, BaseModel):
1236 1236 __tablename__ = 'repositories'
1237 1237 __table_args__ = (
1238 1238 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1239 1239 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1240 1240 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1241 1241 )
1242 1242 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1243 1243 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1244 1244
1245 1245 STATE_CREATED = 'repo_state_created'
1246 1246 STATE_PENDING = 'repo_state_pending'
1247 1247 STATE_ERROR = 'repo_state_error'
1248 1248
1249 1249 LOCK_AUTOMATIC = 'lock_auto'
1250 1250 LOCK_API = 'lock_api'
1251 1251 LOCK_WEB = 'lock_web'
1252 1252 LOCK_PULL = 'lock_pull'
1253 1253
1254 1254 NAME_SEP = URL_SEP
1255 1255
1256 1256 repo_id = Column(
1257 1257 "repo_id", Integer(), nullable=False, unique=True, default=None,
1258 1258 primary_key=True)
1259 1259 _repo_name = Column(
1260 1260 "repo_name", Text(), nullable=False, default=None)
1261 1261 _repo_name_hash = Column(
1262 1262 "repo_name_hash", String(255), nullable=False, unique=True)
1263 1263 repo_state = Column("repo_state", String(255), nullable=True)
1264 1264
1265 1265 clone_uri = Column(
1266 1266 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1267 1267 default=None)
1268 1268 repo_type = Column(
1269 1269 "repo_type", String(255), nullable=False, unique=False, default=None)
1270 1270 user_id = Column(
1271 1271 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1272 1272 unique=False, default=None)
1273 1273 private = Column(
1274 1274 "private", Boolean(), nullable=True, unique=None, default=None)
1275 1275 enable_statistics = Column(
1276 1276 "statistics", Boolean(), nullable=True, unique=None, default=True)
1277 1277 enable_downloads = Column(
1278 1278 "downloads", Boolean(), nullable=True, unique=None, default=True)
1279 1279 description = Column(
1280 1280 "description", String(10000), nullable=True, unique=None, default=None)
1281 1281 created_on = Column(
1282 1282 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1283 1283 default=datetime.datetime.now)
1284 1284 updated_on = Column(
1285 1285 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1286 1286 default=datetime.datetime.now)
1287 1287 _landing_revision = Column(
1288 1288 "landing_revision", String(255), nullable=False, unique=False,
1289 1289 default=None)
1290 1290 enable_locking = Column(
1291 1291 "enable_locking", Boolean(), nullable=False, unique=None,
1292 1292 default=False)
1293 1293 _locked = Column(
1294 1294 "locked", String(255), nullable=True, unique=False, default=None)
1295 1295 _changeset_cache = Column(
1296 1296 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1297 1297
1298 1298 fork_id = Column(
1299 1299 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1300 1300 nullable=True, unique=False, default=None)
1301 1301 group_id = Column(
1302 1302 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1303 1303 unique=False, default=None)
1304 1304
1305 1305 user = relationship('User', lazy='joined')
1306 1306 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1307 1307 group = relationship('RepoGroup', lazy='joined')
1308 1308 repo_to_perm = relationship(
1309 1309 'UserRepoToPerm', cascade='all',
1310 1310 order_by='UserRepoToPerm.repo_to_perm_id')
1311 1311 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1312 1312 stats = relationship('Statistics', cascade='all', uselist=False)
1313 1313
1314 1314 followers = relationship(
1315 1315 'UserFollowing',
1316 1316 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1317 1317 cascade='all')
1318 1318 extra_fields = relationship(
1319 1319 'RepositoryField', cascade="all, delete, delete-orphan")
1320 1320 logs = relationship('UserLog')
1321 1321 comments = relationship(
1322 1322 'ChangesetComment', cascade="all, delete, delete-orphan")
1323 1323 pull_requests_source = relationship(
1324 1324 'PullRequest',
1325 1325 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1326 1326 cascade="all, delete, delete-orphan")
1327 1327 pull_requests_target = relationship(
1328 1328 'PullRequest',
1329 1329 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1330 1330 cascade="all, delete, delete-orphan")
1331 1331 ui = relationship('RepoRhodeCodeUi', cascade="all")
1332 1332 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1333 1333
    def __unicode__(self):
        # <Repository('repo_id:repo_name')> -- logging/debugging repr
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
                                   safe_unicode(self.repo_name))
1337 1337
    @hybrid_property
    def landing_rev(self):
        # always should return [rev_type, rev]
        if self._landing_revision:
            _rev_info = self._landing_revision.split(':')
            if len(_rev_info) < 2:
                # legacy values stored without a type default to 'rev'
                _rev_info.insert(0, 'rev')
            return [_rev_info[0], _rev_info[1]]
        return [None, None]

    @landing_rev.setter
    def landing_rev(self, val):
        # serialized to the column as '<rev_type>:<rev>'
        if ':' not in val:
            raise ValueError('value must be delimited with `:` and consist '
                             'of <rev_type>:<rev>, got %s instead' % val)
        self._landing_revision = val
1354 1354
1355 1355 @hybrid_property
1356 1356 def locked(self):
1357 1357 if self._locked:
1358 1358 user_id, timelocked, reason = self._locked.split(':')
1359 1359 lock_values = int(user_id), timelocked, reason
1360 1360 else:
1361 1361 lock_values = [None, None, None]
1362 1362 return lock_values
1363 1363
1364 1364 @locked.setter
1365 1365 def locked(self, val):
1366 1366 if val and isinstance(val, (list, tuple)):
1367 1367 self._locked = ':'.join(map(str, val))
1368 1368 else:
1369 1369 self._locked = None
1370 1370
    @hybrid_property
    def changeset_cache(self):
        # cached metadata of the latest commit, stored as JSON; falls back
        # to the EmptyCommit representation when unset or unparsable
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not self._changeset_cache:
            return dummy
        try:
            return json.loads(self._changeset_cache)
        except TypeError:
            return dummy
        except Exception:
            # unexpected decode failure: log it but keep the caller working
            log.error(traceback.format_exc())
            return dummy

    @changeset_cache.setter
    def changeset_cache(self, val):
        # best effort: serialization failures are logged, not raised
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())
1391 1391
    @hybrid_property
    def repo_name(self):
        return self._repo_name

    @repo_name.setter
    def repo_name(self, value):
        # also maintain a sha1 of the name in the fixed-size, uniquely
        # indexed repo_name_hash column
        self._repo_name = value
        self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1400 1400
1401 1401 @classmethod
1402 1402 def normalize_repo_name(cls, repo_name):
1403 1403 """
1404 1404 Normalizes os specific repo_name to the format internally stored inside
1405 1405 database using URL_SEP
1406 1406
1407 1407 :param cls:
1408 1408 :param repo_name:
1409 1409 """
1410 1410 return cls.NAME_SEP.join(repo_name.split(os.sep))
1411 1411
    @classmethod
    def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
        """
        Fetch a repository by exact name.

        :param cache: use the short sql cache region for the query
        :param identity_cache: prefer the identity-map based cache; falls
            through to a plain query when nothing is cached yet
        """
        session = Session()
        q = session.query(cls).filter(cls.repo_name == repo_name)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'repo_name', repo_name)
                if val:
                    return val
            else:
                q = q.options(
                    FromCache("sql_cache_short",
                              "get_repo_by_name_%s" % _hash_key(repo_name)))

        return q.scalar()
1428 1428
    @classmethod
    def get_by_full_path(cls, repo_full_path):
        # strip the storage base path, then normalize OS separators before
        # looking the repository up by name
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
        repo_name = cls.normalize_repo_name(repo_name)
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1434 1434
    @classmethod
    def get_repo_forks(cls, repo_id):
        # query (not a list) of repositories forked from the given repo id
        return cls.query().filter(Repository.fork_id == repo_id)
1438 1438
    @classmethod
    def base_path(cls):
        """
        Returns base path when all repos are stored

        :param cls:
        """
        # the base path lives in the RhodeCodeUi row keyed by NAME_SEP;
        # cached in the short sql cache region
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1450 1450
    @classmethod
    def is_valid(cls, repo_name):
        """
        returns True if given repo name is a valid filesystem repository

        :param cls:
        :param repo_name:
        """
        # local import to avoid an import cycle with rhodecode.lib.utils
        from rhodecode.lib.utils import is_valid_repo

        return is_valid_repo(repo_name, cls.base_path())
1462 1462
    @classmethod
    def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
                      case_insensitive=True):
        """
        List repositories, optionally filtered by owner and/or group.

        ``Optional`` sentinels mean "no filter"; pass a concrete value to
        filter on it.

        :param case_insensitive: order by lowercased repo name
        """
        q = Repository.query()

        if not isinstance(user_id, Optional):
            q = q.filter(Repository.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(Repository.group_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(Repository.repo_name))
        else:
            q = q.order_by(Repository.repo_name)
        return q.all()
1479 1479
    @property
    def forks(self):
        """
        Return forks of this repo
        """
        return Repository.get_repo_forks(self.repo_id)

    @property
    def parent(self):
        """
        Returns fork parent
        """
        return self.fork

    @property
    def just_name(self):
        # last path component of the repository name
        return self.repo_name.split(self.NAME_SEP)[-1]

    @property
    def groups_with_parents(self):
        # list of ancestor repo groups, outermost first; empty when the
        # repository lives at the top level
        groups = []
        if self.group is None:
            return groups

        cur_gr = self.group
        groups.insert(0, cur_gr)
        while 1:
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            groups.insert(0, gr)

        return groups

    @property
    def groups_and_repo(self):
        # convenience tuple of (ancestor groups, this repository)
        return self.groups_with_parents, self
1518 1518
    @LazyProperty
    def repo_path(self):
        """
        Returns base full path for that repository means where it actually
        exists on a filesystem
        """
        # computed once per instance (LazyProperty) and backed by the
        # short sql cache region
        q = Session().query(RhodeCodeUi).filter(
            RhodeCodeUi.ui_key == self.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1529 1529
    @property
    def repo_full_path(self):
        # absolute filesystem path of this repository
        p = [self.repo_path]
        # we need to split the name by / since this is how we store the
        # names in the database, but that eventually needs to be converted
        # into a valid system path
        p += self.repo_name.split(self.NAME_SEP)
        return os.path.join(*map(safe_unicode, p))
1538 1538
    @property
    def cache_keys(self):
        """
        Returns associated cache keys for that repo
        """
        # CacheKey rows whose cache_args match this repository's name
        return CacheKey.query()\
            .filter(CacheKey.cache_args == self.repo_name)\
            .order_by(CacheKey.cache_key)\
            .all()
1548 1548
    def get_new_name(self, repo_name):
        """
        Returns the new full repository name based on the assigned group
        and the given (short) repository name.

        :param repo_name: new short name of the repository
        """
        path_prefix = self.group.full_path_splitted if self.group else []
        return self.NAME_SEP.join(path_prefix + [repo_name])
1557 1557
    @property
    def _config(self):
        """
        Returns db based config object.
        """
        # local import to avoid an import cycle with rhodecode.lib.utils
        from rhodecode.lib.utils import make_db_config
        return make_db_config(clear_session=False, repo=self)
1565 1565
1566 1566 def permissions(self, with_admins=True, with_owner=True):
1567 1567 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1568 1568 q = q.options(joinedload(UserRepoToPerm.repository),
1569 1569 joinedload(UserRepoToPerm.user),
1570 1570 joinedload(UserRepoToPerm.permission),)
1571 1571
1572 1572 # get owners and admins and permissions. We do a trick of re-writing
1573 1573 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1574 1574 # has a global reference and changing one object propagates to all
1575 1575 # others. This means if admin is also an owner admin_row that change
1576 1576 # would propagate to both objects
1577 1577 perm_rows = []
1578 1578 for _usr in q.all():
1579 1579 usr = AttributeDict(_usr.user.get_dict())
1580 1580 usr.permission = _usr.permission.permission_name
1581 1581 perm_rows.append(usr)
1582 1582
1583 1583 # filter the perm rows by 'default' first and then sort them by
1584 1584 # admin,write,read,none permissions sorted again alphabetically in
1585 1585 # each group
1586 1586 perm_rows = sorted(perm_rows, key=display_sort)
1587 1587
1588 1588 _admin_perm = 'repository.admin'
1589 1589 owner_row = []
1590 1590 if with_owner:
1591 1591 usr = AttributeDict(self.user.get_dict())
1592 1592 usr.owner_row = True
1593 1593 usr.permission = _admin_perm
1594 1594 owner_row.append(usr)
1595 1595
1596 1596 super_admin_rows = []
1597 1597 if with_admins:
1598 1598 for usr in User.get_all_super_admins():
1599 1599 # if this admin is also owner, don't double the record
1600 1600 if usr.user_id == owner_row[0].user_id:
1601 1601 owner_row[0].admin_row = True
1602 1602 else:
1603 1603 usr = AttributeDict(usr.get_dict())
1604 1604 usr.admin_row = True
1605 1605 usr.permission = _admin_perm
1606 1606 super_admin_rows.append(usr)
1607 1607
1608 1608 return super_admin_rows + owner_row + perm_rows
1609 1609
    def permission_user_groups(self):
        # user-group permission grants on this repository, as
        # AttributeDict rows carrying a ``permission`` attribute
        q = UserGroupRepoToPerm.query().filter(
            UserGroupRepoToPerm.repository == self)
        q = q.options(joinedload(UserGroupRepoToPerm.repository),
                      joinedload(UserGroupRepoToPerm.users_group),
                      joinedload(UserGroupRepoToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            usr = AttributeDict(_user_group.users_group.get_dict())
            usr.permission = _user_group.permission.permission_name
            perm_rows.append(usr)

        return perm_rows
1624 1624
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel

        repo = self
        # current lock state: (user_id, lock_time, reason)
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'url': url('summary_home', repo_name=self.repo_name, qualified=True),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        # expose custom extra fields only when the feature is enabled
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
1669 1670
1670 1671 @classmethod
1671 1672 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1672 1673 if not lock_time:
1673 1674 lock_time = time.time()
1674 1675 if not lock_reason:
1675 1676 lock_reason = cls.LOCK_AUTOMATIC
1676 1677 repo.locked = [user_id, lock_time, lock_reason]
1677 1678 Session().add(repo)
1678 1679 Session().commit()
1679 1680
1680 1681 @classmethod
1681 1682 def unlock(cls, repo):
1682 1683 repo.locked = None
1683 1684 Session().add(repo)
1684 1685 Session().commit()
1685 1686
    @classmethod
    def getlock(cls, repo):
        # return the raw lock state tuple (user_id, lock_time, reason) of *repo*
        return repo.locked
1689 1690
1690 1691 def is_user_lock(self, user_id):
1691 1692 if self.lock[0]:
1692 1693 lock_user_id = safe_int(self.lock[0])
1693 1694 user_id = safe_int(user_id)
1694 1695 # both are ints, and they are equal
1695 1696 return all([lock_user_id, user_id]) and lock_user_id == user_id
1696 1697
1697 1698 return False
1698 1699
1699 1700 def get_locking_state(self, action, user_id, only_when_enabled=True):
1700 1701 """
1701 1702 Checks locking on this repository, if locking is enabled and lock is
1702 1703 present returns a tuple of make_lock, locked, locked_by.
1703 1704 make_lock can have 3 states None (do nothing) True, make lock
1704 1705 False release lock, This value is later propagated to hooks, which
1705 1706 do the locking. Think about this as signals passed to hooks what to do.
1706 1707
1707 1708 """
1708 1709 # TODO: johbo: This is part of the business logic and should be moved
1709 1710 # into the RepositoryModel.
1710 1711
1711 1712 if action not in ('push', 'pull'):
1712 1713 raise ValueError("Invalid action value: %s" % repr(action))
1713 1714
1714 1715 # defines if locked error should be thrown to user
1715 1716 currently_locked = False
1716 1717 # defines if new lock should be made, tri-state
1717 1718 make_lock = None
1718 1719 repo = self
1719 1720 user = User.get(user_id)
1720 1721
1721 1722 lock_info = repo.locked
1722 1723
1723 1724 if repo and (repo.enable_locking or not only_when_enabled):
1724 1725 if action == 'push':
1725 1726 # check if it's already locked !, if it is compare users
1726 1727 locked_by_user_id = lock_info[0]
1727 1728 if user.user_id == locked_by_user_id:
1728 1729 log.debug(
1729 1730 'Got `push` action from user %s, now unlocking', user)
1730 1731 # unlock if we have push from user who locked
1731 1732 make_lock = False
1732 1733 else:
1733 1734 # we're not the same user who locked, ban with
1734 1735 # code defined in settings (default is 423 HTTP Locked) !
1735 1736 log.debug('Repo %s is currently locked by %s', repo, user)
1736 1737 currently_locked = True
1737 1738 elif action == 'pull':
1738 1739 # [0] user [1] date
1739 1740 if lock_info[0] and lock_info[1]:
1740 1741 log.debug('Repo %s is currently locked by %s', repo, user)
1741 1742 currently_locked = True
1742 1743 else:
1743 1744 log.debug('Setting lock on repo %s by %s', repo, user)
1744 1745 make_lock = True
1745 1746
1746 1747 else:
1747 1748 log.debug('Repository %s do not have locking enabled', repo)
1748 1749
1749 1750 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
1750 1751 make_lock, currently_locked, lock_info)
1751 1752
1752 1753 from rhodecode.lib.auth import HasRepoPermissionAny
1753 1754 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
1754 1755 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
1755 1756 # if we don't have at least write permission we cannot make a lock
1756 1757 log.debug('lock state reset back to FALSE due to lack '
1757 1758 'of at least read permission')
1758 1759 make_lock = False
1759 1760
1760 1761 return make_lock, currently_locked, lock_info
1761 1762
    @property
    def last_db_change(self):
        # timestamp of the last database modification of this repository row
        return self.updated_on
1765 1766
1766 1767 @property
1767 1768 def clone_uri_hidden(self):
1768 1769 clone_uri = self.clone_uri
1769 1770 if clone_uri:
1770 1771 import urlobject
1771 1772 url_obj = urlobject.URLObject(clone_uri)
1772 1773 if url_obj.password:
1773 1774 clone_uri = url_obj.with_password('*****')
1774 1775 return clone_uri
1775 1776
1776 1777 def clone_url(self, **override):
1777 1778 qualified_home_url = url('home', qualified=True)
1778 1779
1779 1780 uri_tmpl = None
1780 1781 if 'with_id' in override:
1781 1782 uri_tmpl = self.DEFAULT_CLONE_URI_ID
1782 1783 del override['with_id']
1783 1784
1784 1785 if 'uri_tmpl' in override:
1785 1786 uri_tmpl = override['uri_tmpl']
1786 1787 del override['uri_tmpl']
1787 1788
1788 1789 # we didn't override our tmpl from **overrides
1789 1790 if not uri_tmpl:
1790 1791 uri_tmpl = self.DEFAULT_CLONE_URI
1791 1792 try:
1792 1793 from pylons import tmpl_context as c
1793 1794 uri_tmpl = c.clone_uri_tmpl
1794 1795 except Exception:
1795 1796 # in any case if we call this outside of request context,
1796 1797 # ie, not having tmpl_context set up
1797 1798 pass
1798 1799
1799 1800 return get_clone_url(uri_tmpl=uri_tmpl,
1800 1801 qualifed_home_url=qualified_home_url,
1801 1802 repo_name=self.repo_name,
1802 1803 repo_id=self.repo_id, **override)
1803 1804
    def set_state(self, state):
        # update repository state and stage the change in the current session
        # (the caller is responsible for committing)
        self.repo_state = state
        Session().add(self)
1807 1808 #==========================================================================
1808 1809 # SCM PROPERTIES
1809 1810 #==========================================================================
1810 1811
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """Return a commit by id or index via the safe lookup helper."""
        return get_commit_safe(
            self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
1814 1815
1815 1816 def get_changeset(self, rev=None, pre_load=None):
1816 1817 warnings.warn("Use get_commit", DeprecationWarning)
1817 1818 commit_id = None
1818 1819 commit_idx = None
1819 1820 if isinstance(rev, basestring):
1820 1821 commit_id = rev
1821 1822 else:
1822 1823 commit_idx = rev
1823 1824 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
1824 1825 pre_load=pre_load)
1825 1826
1826 1827 def get_landing_commit(self):
1827 1828 """
1828 1829 Returns landing commit, or if that doesn't exist returns the tip
1829 1830 """
1830 1831 _rev_type, _rev = self.landing_rev
1831 1832 commit = self.get_commit(_rev)
1832 1833 if isinstance(commit, EmptyCommit):
1833 1834 return self.get_commit()
1834 1835 return commit
1835 1836
    def update_commit_cache(self, cs_cache=None, config=None):
        """
        Update cache of last changeset for repository, keys should be::

            short_id
            raw_id
            revision
            parents
            message
            date
            author

        :param cs_cache: pre-computed commit cache dict (or a
            ``BaseChangeset``); when None the latest commit is loaded from
            the SCM backend
        :param config: optional vcs config forwarded to ``scm_instance``
        """
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        if cs_cache is None:
            # use no-cache version here
            scm_repo = self.scm_instance(cache=False, config=config)
            if scm_repo:
                cs_cache = scm_repo.get_commit(
                    pre_load=["author", "date", "message", "parents"])
            else:
                # repo not found on filesystem; cache an empty commit
                cs_cache = EmptyCommit()

        if isinstance(cs_cache, BaseChangeset):
            cs_cache = cs_cache.__json__()

        def is_outdated(new_cs_cache):
            # cache is stale when either the hash or the revision changed
            if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
                new_cs_cache['revision'] != self.changeset_cache['revision']):
                return True
            return False

        # check if we have maybe already latest cached revision
        if is_outdated(cs_cache) or not self.changeset_cache:
            _default = datetime.datetime.fromtimestamp(0)
            last_change = cs_cache.get('date') or _default
            log.debug('updated repo %s with new cs cache %s',
                      self.repo_name, cs_cache)
            # ``updated_on`` mirrors the cached commit's date so listing
            # pages can sort without touching the scm backend
            self.updated_on = last_change
            self.changeset_cache = cs_cache
            Session().add(self)
            Session().commit()
        else:
            log.debug('Skipping update_commit_cache for repo:`%s` '
                      'commit already with latest changes', self.repo_name)
1882 1883
    @property
    def tip(self):
        # latest commit of the repository
        return self.get_commit('tip')
1886 1887
    @property
    def author(self):
        # author of the latest commit
        return self.tip.author
1890 1891
    @property
    def last_change(self):
        # last change time as reported by the scm backend
        return self.scm_instance().last_change
1894 1895
1895 1896 def get_comments(self, revisions=None):
1896 1897 """
1897 1898 Returns comments for this repository grouped by revisions
1898 1899
1899 1900 :param revisions: filter query by revisions only
1900 1901 """
1901 1902 cmts = ChangesetComment.query()\
1902 1903 .filter(ChangesetComment.repo == self)
1903 1904 if revisions:
1904 1905 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
1905 1906 grouped = collections.defaultdict(list)
1906 1907 for cmt in cmts.all():
1907 1908 grouped[cmt.revision].append(cmt)
1908 1909 return grouped
1909 1910
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        :return: dict mapping revision -> [status, status label,
            pull request id or None, target repo name or None]
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # stored statuses overwrite the synthetic under-review entries above
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
1951 1952
1952 1953 # ==========================================================================
1953 1954 # SCM CACHE INSTANCE
1954 1955 # ==========================================================================
1955 1956
    def scm_instance(self, **kwargs):
        """Return a (possibly cached) vcs backend instance for this repo.

        Keyword args: ``config`` — optional vcs config, passing one bypasses
        the cache; ``cache`` — explicit cache control, when None the global
        ``vcs_full_cache`` setting decides.
        """
        import rhodecode

        # Passing a config will not hit the cache currently only used
        # for repo2dbmapper
        config = kwargs.pop('config', None)
        cache = kwargs.pop('cache', None)
        full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
        # if cache is NOT defined use default global, else we have a full
        # control over cache behaviour
        if cache is None and full_cache and not config:
            return self._get_instance_cached()
        return self._get_instance(cache=bool(cache), config=config)
1969 1970
    def _get_instance_cached(self):
        """Return the vcs backend instance through the long-term cache
        region, honouring repo-level cache invalidation."""
        @cache_region('long_term')
        def _get_repo(cache_key):
            return self._get_instance()

        # wraps the cached call so stale entries are recomputed when the
        # repo's cache key has been invalidated
        invalidator_context = CacheKey.repo_context_cache(
            _get_repo, self.repo_name, None)

        with invalidator_context as context:
            context.invalidate()
            repo = context.compute()

        return repo
1983 1984
    def _get_instance(self, cache=True, config=None):
        """Instantiate the vcs backend for this repository.

        :param cache: forwarded to the backend's remote-call cache
        :param config: vcs config; defaults to this repository's own
        :return: backend instance, or None when the path on disk cannot be
            recognized as a known scm type
        """
        repo_full_path = self.repo_full_path
        try:
            vcs_alias = get_scm(repo_full_path)[0]
            log.debug(
                'Creating instance of %s repository from %s',
                vcs_alias, repo_full_path)
            backend = get_backend(vcs_alias)
        except VCSError:
            log.exception(
                'Perhaps this repository is in db and not in '
                'filesystem run rescan repositories with '
                '"destroy old data" option from admin panel')
            return

        config = config or self._config
        custom_wire = {
            'cache': cache  # controls the vcs.remote cache
        }
        repo = backend(
            safe_str(repo_full_path), config=config, create=False,
            with_wire=custom_wire)

        return repo
2008 2009
    def __json__(self):
        # minimal JSON representation used by generic serializers
        return {'landing_rev': self.landing_rev}
2011 2012
2012 2013 def get_dict(self):
2013 2014
2014 2015 # Since we transformed `repo_name` to a hybrid property, we need to
2015 2016 # keep compatibility with the code which uses `repo_name` field.
2016 2017
2017 2018 result = super(Repository, self).get_dict()
2018 2019 result['repo_name'] = result.pop('_repo_name', None)
2019 2020 return result
2020 2021
2021 2022
class RepoGroup(Base, BaseModel):
    """
    Database model of a repository group. Groups nest via
    ``group_parent_id`` and carry their own permission sets.
    """
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        # a group can never be its own parent
        CheckConstraint('group_id != group_parent_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
                                      self.group_name)

    @classmethod
    def _generate_choice(cls, repo_group):
        # build a (group_id, escaped full path) tuple for select2 widgets
        from webhelpers.html import literal as _literal
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        """Return select2 choices for *groups*, sorted by top-level name.

        :param groups: iterable of RepoGroup; defaults to all groups
        :param show_empty_group: prepend a '-- No parent --' entry
        """
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [('-1', u'-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        # separator used in nested group paths / URLs
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Fetch a single group by name, optionally cached and/or
        case-insensitive."""
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            gr = gr.options(FromCache(
                "sql_cache_short",
                "get_group_%s" % _hash_key(group_name)))
        return gr.scalar()

    @classmethod
    def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
                            case_insensitive=True):
        """List repo groups, optionally filtered by owner and/or parent.

        ``Optional`` sentinels mean "no filter" for the respective argument.
        """
        q = RepoGroup.query()

        if not isinstance(user_id, Optional):
            q = q.filter(RepoGroup.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(RepoGroup.group_parent_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(RepoGroup.group_name))
        else:
            q = q.order_by(RepoGroup.group_name)
        return q.all()

    @property
    def parents(self):
        # ancestor groups, outermost first; recursion depth is bounded to
        # guard against cyclic parent links
        parents_recursion_limit = 10
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinit loops
                log.error(('more than %s parents found for group %s, stopping '
                           'recursive parent fetching' % (parents_recursion_limit, self)))
                break

            groups.insert(0, gr)
        return groups

    @property
    def children(self):
        # direct child groups as a query (call .all() to materialize)
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        # last path segment of the full group name
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        # full slash-separated group path
        return self.group_name

    @property
    def full_path_splitted(self):
        # full group path as a list of segments
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        # repositories directly inside this group, ordered by name
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        # number of repositories in this group and all nested groups
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def _recursive_objects(self, include_repos=True):
        # depth-first traversal collecting this group, all nested groups,
        # and (optionally) the repositories inside each group
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    all_.append(gr)
                    _get_members(gr)

        _get_members(self)
        return [self] + all_

    def recursive_groups_and_repos(self):
        """
        Recursive return all groups, with repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name: new (last-segment) name for this group
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

    def permissions(self, with_admins=True, with_owner=True):
        """Return per-user permission rows for this group.

        Rows are ``AttributeDict``s of user fields plus a ``permission``
        attribute; super-admin and owner rows can be prepended.
        """
        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_sort)

        _admin_perm = 'group.admin'
        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        return super_admin_rows + owner_row + perm_rows

    def permission_user_groups(self):
        """Return user-group permission rows for this group."""
        q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            usr = AttributeDict(_user_group.users_group.get_dict())
            usr.permission = _user_group.permission.permission_name
            perm_rows.append(usr)

        return perm_rows

    def get_api_data(self):
        """
        Common function for generating api data

        """
        group = self
        data = {
            'group_id': group.group_id,
            'group_name': group.group_name,
            'group_description': group.group_description,
            'parent_group': group.parent_group.group_name if group.parent_group else None,
            'repositories': [x.repo_name for x in group.repositories],
            'owner': group.user.username,
        }
        return data
2280 2281
2281 2282
2282 2283 class Permission(Base, BaseModel):
2283 2284 __tablename__ = 'permissions'
2284 2285 __table_args__ = (
2285 2286 Index('p_perm_name_idx', 'permission_name'),
2286 2287 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2287 2288 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2288 2289 )
2289 2290 PERMS = [
2290 2291 ('hg.admin', _('RhodeCode Super Administrator')),
2291 2292
2292 2293 ('repository.none', _('Repository no access')),
2293 2294 ('repository.read', _('Repository read access')),
2294 2295 ('repository.write', _('Repository write access')),
2295 2296 ('repository.admin', _('Repository admin access')),
2296 2297
2297 2298 ('group.none', _('Repository group no access')),
2298 2299 ('group.read', _('Repository group read access')),
2299 2300 ('group.write', _('Repository group write access')),
2300 2301 ('group.admin', _('Repository group admin access')),
2301 2302
2302 2303 ('usergroup.none', _('User group no access')),
2303 2304 ('usergroup.read', _('User group read access')),
2304 2305 ('usergroup.write', _('User group write access')),
2305 2306 ('usergroup.admin', _('User group admin access')),
2306 2307
2307 2308 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2308 2309 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2309 2310
2310 2311 ('hg.usergroup.create.false', _('User Group creation disabled')),
2311 2312 ('hg.usergroup.create.true', _('User Group creation enabled')),
2312 2313
2313 2314 ('hg.create.none', _('Repository creation disabled')),
2314 2315 ('hg.create.repository', _('Repository creation enabled')),
2315 2316 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2316 2317 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2317 2318
2318 2319 ('hg.fork.none', _('Repository forking disabled')),
2319 2320 ('hg.fork.repository', _('Repository forking enabled')),
2320 2321
2321 2322 ('hg.register.none', _('Registration disabled')),
2322 2323 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2323 2324 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2324 2325
2325 2326 ('hg.extern_activate.manual', _('Manual activation of external account')),
2326 2327 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2327 2328
2328 2329 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2329 2330 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2330 2331 ]
2331 2332
2332 2333 # definition of system default permissions for DEFAULT user
2333 2334 DEFAULT_USER_PERMISSIONS = [
2334 2335 'repository.read',
2335 2336 'group.read',
2336 2337 'usergroup.read',
2337 2338 'hg.create.repository',
2338 2339 'hg.repogroup.create.false',
2339 2340 'hg.usergroup.create.false',
2340 2341 'hg.create.write_on_repogroup.true',
2341 2342 'hg.fork.repository',
2342 2343 'hg.register.manual_activate',
2343 2344 'hg.extern_activate.auto',
2344 2345 'hg.inherit_default_perms.true',
2345 2346 ]
2346 2347
2347 2348 # defines which permissions are more important higher the more important
2348 2349 # Weight defines which permissions are more important.
2349 2350 # The higher number the more important.
2350 2351 PERM_WEIGHTS = {
2351 2352 'repository.none': 0,
2352 2353 'repository.read': 1,
2353 2354 'repository.write': 3,
2354 2355 'repository.admin': 4,
2355 2356
2356 2357 'group.none': 0,
2357 2358 'group.read': 1,
2358 2359 'group.write': 3,
2359 2360 'group.admin': 4,
2360 2361
2361 2362 'usergroup.none': 0,
2362 2363 'usergroup.read': 1,
2363 2364 'usergroup.write': 3,
2364 2365 'usergroup.admin': 4,
2365 2366
2366 2367 'hg.repogroup.create.false': 0,
2367 2368 'hg.repogroup.create.true': 1,
2368 2369
2369 2370 'hg.usergroup.create.false': 0,
2370 2371 'hg.usergroup.create.true': 1,
2371 2372
2372 2373 'hg.fork.none': 0,
2373 2374 'hg.fork.repository': 1,
2374 2375 'hg.create.none': 0,
2375 2376 'hg.create.repository': 1
2376 2377 }
2377 2378
2378 2379 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2379 2380 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2380 2381 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2381 2382
2382 2383 def __unicode__(self):
2383 2384 return u"<%s('%s:%s')>" % (
2384 2385 self.__class__.__name__, self.permission_id, self.permission_name
2385 2386 )
2386 2387
2387 2388 @classmethod
2388 2389 def get_by_key(cls, key):
2389 2390 return cls.query().filter(cls.permission_name == key).scalar()
2390 2391
2391 2392 @classmethod
2392 2393 def get_default_repo_perms(cls, user_id, repo_id=None):
2393 2394 q = Session().query(UserRepoToPerm, Repository, Permission)\
2394 2395 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2395 2396 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2396 2397 .filter(UserRepoToPerm.user_id == user_id)
2397 2398 if repo_id:
2398 2399 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2399 2400 return q.all()
2400 2401
2401 2402 @classmethod
2402 2403 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2403 2404 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2404 2405 .join(
2405 2406 Permission,
2406 2407 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2407 2408 .join(
2408 2409 Repository,
2409 2410 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2410 2411 .join(
2411 2412 UserGroup,
2412 2413 UserGroupRepoToPerm.users_group_id ==
2413 2414 UserGroup.users_group_id)\
2414 2415 .join(
2415 2416 UserGroupMember,
2416 2417 UserGroupRepoToPerm.users_group_id ==
2417 2418 UserGroupMember.users_group_id)\
2418 2419 .filter(
2419 2420 UserGroupMember.user_id == user_id,
2420 2421 UserGroup.users_group_active == true())
2421 2422 if repo_id:
2422 2423 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2423 2424 return q.all()
2424 2425
2425 2426 @classmethod
2426 2427 def get_default_group_perms(cls, user_id, repo_group_id=None):
2427 2428 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2428 2429 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2429 2430 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2430 2431 .filter(UserRepoGroupToPerm.user_id == user_id)
2431 2432 if repo_group_id:
2432 2433 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2433 2434 return q.all()
2434 2435
2435 2436 @classmethod
2436 2437 def get_default_group_perms_from_user_group(
2437 2438 cls, user_id, repo_group_id=None):
2438 2439 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2439 2440 .join(
2440 2441 Permission,
2441 2442 UserGroupRepoGroupToPerm.permission_id ==
2442 2443 Permission.permission_id)\
2443 2444 .join(
2444 2445 RepoGroup,
2445 2446 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2446 2447 .join(
2447 2448 UserGroup,
2448 2449 UserGroupRepoGroupToPerm.users_group_id ==
2449 2450 UserGroup.users_group_id)\
2450 2451 .join(
2451 2452 UserGroupMember,
2452 2453 UserGroupRepoGroupToPerm.users_group_id ==
2453 2454 UserGroupMember.users_group_id)\
2454 2455 .filter(
2455 2456 UserGroupMember.user_id == user_id,
2456 2457 UserGroup.users_group_active == true())
2457 2458 if repo_group_id:
2458 2459 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2459 2460 return q.all()
2460 2461
    @classmethod
    def get_default_user_group_perms(cls, user_id, user_group_id=None):
        """
        Return (UserUserGroupToPerm, UserGroup, Permission) tuples for
        user-group permissions granted *directly* to ``user_id``.

        :param user_id: id of the user whose direct grants are queried
        :param user_group_id: optional user-group id to narrow the result
            to a single user group
        """
        q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
            .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
            .filter(UserUserGroupToPerm.user_id == user_id)
        if user_group_id:
            q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
        return q.all()
2470 2471
    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        """
        Return (UserGroupUserGroupToPerm, UserGroup, Permission) tuples for
        user-group permissions the user *inherits* through membership in
        active user groups.

        The target of the permission and the group granting it are both
        ``UserGroup`` rows, so the target side is aliased to keep the two
        joins distinguishable.

        :param user_id: id of the user whose inherited grants are queried
        :param user_group_id: optional id of the *granting* user group to
            narrow the result
        """
        # alias for the user group the permission applies to, as opposed to
        # the user group through which the permission is inherited
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()
2500 2501
2501 2502
class UserRepoToPerm(Base, BaseModel):
    """
    Association table: a single user's permission on a single repository.
    One row per (user, repository, permission) triple.
    """
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository, permission):
        """Stage a new permission row in the session; caller commits."""
        perm_row = cls()
        perm_row.user = user
        perm_row.repository = repository
        perm_row.permission = permission
        Session().add(perm_row)
        return perm_row

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)
2529 2530
2530 2531
class UserUserGroupToPerm(Base, BaseModel):
    """
    Association table: a single user's permission on a user group.
    One row per (user, user group, permission) triple.
    """
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Stage a new permission row in the session; caller commits."""
        perm_row = cls()
        perm_row.user = user
        perm_row.user_group = user_group
        perm_row.permission = permission
        Session().add(perm_row)
        return perm_row

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
2558 2559
2559 2560
class UserToPerm(Base, BaseModel):
    """
    Association table: a global (non-object-scoped) permission assigned
    directly to a user. One row per (user, permission) pair.
    """
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # eagerly joined: global permissions are typically read together with
    # the permission row itself
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
2576 2577
2577 2578
class UserGroupRepoToPerm(Base, BaseModel):
    """
    Association table: a user group's permission on a repository.
    One row per (user group, repository, permission) triple.
    """
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Stage a new permission row in the session; caller commits."""
        perm_row = cls()
        perm_row.users_group = users_group
        perm_row.repository = repository
        perm_row.permission = permission
        Session().add(perm_row)
        return perm_row

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2605 2606
2606 2607
class UserGroupUserGroupToPerm(Base, BaseModel):
    """
    Association table: one user group's permission on *another* user group
    (the target). Both sides reference ``users_groups``, so the two
    relationships below need explicit ``primaryjoin`` conditions; the check
    constraint forbids a group granting itself a permission.
    """
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        CheckConstraint('target_user_group_id != user_group_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # the group the permission applies to
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    # the group that holds the permission
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Stage a new permission row in the session; caller commits."""
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
2635 2636
2636 2637
class UserGroupToPerm(Base, BaseModel):
    """
    Association table: a global (non-object-scoped) permission assigned to
    a user group. One row per (user group, permission) pair.
    """
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
2650 2651
2651 2652
class UserRepoGroupToPerm(Base, BaseModel):
    """
    Association table: a single user's permission on a repository group.
    One row per (user, repository group, permission) triple.
    """
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Stage a new permission row in the session; caller commits."""
        perm_row = cls()
        perm_row.user = user
        perm_row.group = repository_group
        perm_row.permission = permission
        Session().add(perm_row)
        return perm_row
2677 2678
2678 2679
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """
    Association table: a user group's permission on a repository group.
    """
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        # NOTE(review): unlike the sibling *_to_perm tables this unique
        # constraint omits 'permission_id' — presumably intentional (only
        # one permission per group pair), but worth confirming.
        UniqueConstraint('users_group_id', 'group_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Stage a new permission row in the session; caller commits."""
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
2707 2708
2708 2709
class Statistics(Base, BaseModel):
    """
    Pre-computed commit statistics for a repository, stored as serialized
    JSON blobs (one row per repository).
    """
    __tablename__ = 'statistics'
    __table_args__ = (
        UniqueConstraint('repository_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # revision up to which the stats below were computed
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)
2724 2725
2725 2726
class UserFollowing(Base, BaseModel):
    """
    Records that a user follows either a repository or another user.
    Exactly one of ``follows_repo_id`` / ``follows_user_id`` is expected to
    be set per row (both columns are nullable).
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # the follower; explicit primaryjoin because two FKs point at users
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of followings targeting repository ``repo_id``."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
2749 2750
2750 2751
class CacheKey(Base, BaseModel):
    """
    Database-backed cache invalidation marker.

    ``cache_key`` is the full key (optional instance prefix + repo name +
    optional type suffix), ``cache_args`` holds the repo name it was built
    from, and ``cache_active`` flags whether the cached value is still
    considered valid.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    CACHE_TYPE_ATOM = 'ATOM'
    CACHE_TYPE_RSS = 'RSS'
    CACHE_TYPE_README = 'README'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        # new keys always start out invalid until a value is computed
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split the stored key around the repo name (cache_args)
        return self.cache_key.partition(self.cache_args)

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        prefix, _repo_name, _suffix = self._cache_key_partition()
        return prefix

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.html.
        """
        _prefix, _repo_name, suffix = self._cache_key_partition()
        return suffix

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def get_cache_key(cls, repo_name, cache_type):
        """
        Generate a cache key for this process of RhodeCode instance.
        Prefix most likely will be process id or maybe explicitly set
        instance_id from .ini file.
        """
        import rhodecode
        instance_prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')

        repo_part = safe_unicode(repo_name)
        if cache_type:
            key_part = u'{}_{}'.format(repo_part, cache_type)
        else:
            key_part = repo_part

        return u'{}{}'.format(instance_prefix, key_part)

    @classmethod
    def set_invalidate(cls, repo_name, delete=False):
        """
        Mark all caches of a repo as invalid in the database.
        """
        try:
            query = Session().query(cls).filter(cls.cache_args == repo_name)
            if delete:
                log.debug('cache objects deleted for repo %s',
                          safe_str(repo_name))
                query.delete()
            else:
                log.debug('cache objects marked as invalid for repo %s',
                          safe_str(repo_name))
                query.update({"cache_active": False})

            Session().commit()
        except Exception:
            log.exception(
                'Cache key invalidation failed for repository %s',
                safe_str(repo_name))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the row for ``cache_key``, or None when absent."""
        return cls.query().filter(cls.cache_key == cache_key).scalar() or None

    @classmethod
    def repo_context_cache(cls, compute_func, repo_name, cache_type):
        """
        @cache_region('long_term')
        def _heavy_calculation(cache_key):
            return 'result'

        cache_context = CacheKey.repo_context_cache(
            _heavy_calculation, repo_name, cache_type)

        with cache_context as context:
            context.invalidate()
            computed = context.compute()

        assert computed == 'result'
        """
        from rhodecode.lib import caches
        return caches.InvalidationContext(compute_func, repo_name, cache_type)
2875 2876
2876 2877
class ChangesetComment(Base, BaseModel):
    """
    A comment attached either to a commit (``revision``) or to a pull
    request (``pull_request_id``); line-level comments additionally carry
    ``f_path`` and ``line_no``.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    # display_state value marking a comment as outdated by newer changes
    COMMENT_OUTDATED = u'comment_outdated'

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # renderer used for ``text`` (e.g. markup engine name) — see render()
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    def render(self, mentions=False):
        """Render ``text`` with the comment's stored renderer."""
        from rhodecode.lib import helpers as h
        return h.render(self.text, renderer=self.renderer, mentions=mentions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:ChangesetComment #%s>' % self.comment_id
        else:
            return '<DB:ChangesetComment at %#x>' % id(self)
2934 2935
2935 2936
class ChangesetStatus(Base, BaseModel):
    """
    Review status (approved/rejected/...) set on a commit or pull request;
    ``version`` tracks successive status changes for the same revision.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs used for display
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Return the human-readable label for a status value, or None."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        # label for this instance's own status
        return ChangesetStatus.get_status_lbl(self.status)
2985 2986
2986 2987
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.

    Mixin (note: no ``Base``) shared by ``PullRequest`` and
    ``PullRequestVersion``; FK/relationship columns are ``declared_attr``
    so each subclass gets its own copies.
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    @declared_attr
    def user_id(cls):
        # author of the pull request
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    source_ref = Column('org_ref', Unicode(255), nullable=False)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    target_ref = Column('other_ref', Unicode(255), nullable=False)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    merge_rev = Column('merge_rev', String(40), nullable=True)

    @hybrid_property
    def revisions(self):
        # stored as a single colon-separated string; empty -> []
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = ':'.join(val)

    @declared_attr
    def author(cls):
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        # primaryjoin built per subclass since each declares its own FK
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        # ref string format: "<type>:<name>:<commit_id>"
        refs = self.source_ref.split(':')
        return Reference(refs[0], refs[1], refs[2])

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        # ref string format: "<type>:<name>:<commit_id>"
        refs = self.target_ref.split(':')
        return Reference(refs[0], refs[1], refs[2])
3080 3081
3081 3082
class PullRequest(Base, _PullRequestBase):
    """
    A live pull request row.

    Fix: the ``get_api_data`` dict literal contained leftover diff residue —
    both the old and the new form of the ``'url'`` entry were present,
    producing a duplicated/garbled key. Only the current form (built from
    ``self``) is kept.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    statuses = relationship('ChangesetStatus')
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")
    versions = relationship('PullRequestVersion',
                            cascade="all, delete, delete-orphan")

    def is_closed(self):
        """Return True when the pull request status is closed."""
        return self.status == self.STATUS_CLOSED

    def get_api_data(self):
        """
        Return a JSON-serializable dict describing this pull request for
        the API layer (merge status, source/target refs, reviewers with
        their latest review status).
        """
        from rhodecode.model.pull_request import PullRequestModel
        pull_request = self
        merge_status = PullRequestModel().merge_status(pull_request)
        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': url('pullrequest_show', repo_name=self.target_repo.repo_name,
                       pull_request_id=self.pull_request_id,
                       qualified=True),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': {
                'status': merge_status[0],
                'message': unicode(merge_status[1]),
            },
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for reviewer, st in pull_request.reviewers_statuses()
            ]
        }

        return data

    def __json__(self):
        return {
            'revisions': self.revisions,
        }

    def calculated_review_status(self):
        # TODO: anderson: 13.05.15 Used only on templates/my_account_pullrequests.html
        # because it's tricky on how to use ChangesetStatusModel from there
        warnings.warn("Use calculated_review_status from ChangesetStatusModel", DeprecationWarning)
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        warnings.warn("Use reviewers_statuses from ChangesetStatusModel", DeprecationWarning)
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)
3178 3178
3179 3179
class PullRequestVersion(Base, _PullRequestBase):
    """
    A saved snapshot of a pull request's state, one row per update of the
    pull request.  All pull-request columns come from the _PullRequestBase
    mixin shared with PullRequest.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    # surrogate primary key for the snapshot row
    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    # the live pull request this version belongs to
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        # fall back to the object's memory address before the row is persisted
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)
3199 3199
3200 3200
class PullRequestReviewers(Base, BaseModel):
    """
    Association table linking users to the pull requests they are asked
    to review.
    """
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    def __init__(self, user=None, pull_request=None):
        # convenience constructor; the relationships populate the FK columns
        self.user = user
        self.pull_request = pull_request

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)

    user = relationship('User')
    pull_request = relationship('PullRequest')
3223 3223
3224 3224
class Notification(Base, BaseModel):
    """
    A notification record; delivery to individual users is tracked through
    the UserNotification association table.
    """
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    # known notification types, stored in the `type` column
    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    # deleting a notification removes its per-user association rows too
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")

    @property
    def recipients(self):
        # users this notification was addressed to, ordered by user id
        return [x.user for x in UserNotification.query()\
            .filter(UserNotification.notification == self)\
            .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and attach it to every user in *recipients*.

        :param created_by: User that triggered the notification
        :param subject: short subject line
        :param body: full notification body
        :param recipients: iterable of User instances to notify
        :param type_: one of the TYPE_* constants; defaults to TYPE_MESSAGE
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        for u in recipients:
            assoc = UserNotification()
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True

            u.notifications.append(assoc)
        Session().add(notification)

        return notification

    @property
    def description(self):
        # human-readable description, rendered by NotificationModel
        from rhodecode.model.notification import NotificationModel
        return NotificationModel().make_description(self)
3287 3287
3288 3288
class UserNotification(Base, BaseModel):
    """
    Association row between a User and a Notification, tracking per-user
    read state and delivery time.
    """
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)  # has the user seen it yet
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    # newest notifications first
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        """Flag this notification as read and stage the change in the session."""
        self.read = True
        Session().add(self)
3308 3308
3309 3309
class Gist(Base, BaseModel):
    """
    A code snippet (gist), backed by a small VCS repository stored under
    ``base_path()``.
    """
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    # visibility types
    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    # access-control levels
    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    # expiry timestamp as a float; semantics of "never expires" not visible
    # here — see rhodecode.model.gist for how this value is produced
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @classmethod
    def get_or_404(cls, id_):
        """Return the gist with access id *id_* or raise HTTPNotFound."""
        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        """Return the gist with *gist_access_id*, or None when not found."""
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        """Public URL of this gist; honors the ``gist_alias_url`` setting."""
        import rhodecode
        alias_url = rhodecode.CONFIG.get('gist_alias_url')
        if alias_url:
            return alias_url.replace('{gistid}', self.gist_access_id)

        return url('gist', gist_id=self.gist_access_id, qualified=True)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        # cached lookup of the repositories root path
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            # content is intentionally not included in the API payload
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """Return a VCS repository object for this gist's backing store."""
        from rhodecode.lib.vcs import get_repo
        base_path = self.base_path()
        return get_repo(os.path.join(*map(safe_str,
                                          [base_path, self.gist_access_id])))
3403 3403
3404 3404
class DbMigrateVersion(Base, BaseModel):
    """
    Bookkeeping table used by the database migration tool to record the
    schema version applied to each repository path.
    """
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)
3414 3414
3415 3415
class ExternalIdentity(Base, BaseModel):
    """
    Maps an identity from an external authentication provider (plus its
    tokens) to a local user account.  The composite primary key is
    (external_id, local_user_id, provider_name).
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'})

    external_id = Column('external_id', Unicode(255), default=u'',
                         primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(),
                           ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'',
                           primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name,
                                    local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        # NOTE(review): truthiness test skips the filter for user id 0;
        # presumably user ids start at 1 — confirm
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        # join condition between the identity row and the local user
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query
@@ -1,1148 +1,1153 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30
31 31 from pylons.i18n.translation import _
32 32 from pylons.i18n.translation import lazy_ugettext
33 33
34 34 import rhodecode
35 35 from rhodecode.lib import helpers as h, hooks_utils, diffs
36 36 from rhodecode.lib.compat import OrderedDict
37 37 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
38 38 from rhodecode.lib.markup_renderer import (
39 39 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
40 40 from rhodecode.lib.utils import action_logger
41 41 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
42 42 from rhodecode.lib.vcs.backends.base import (
43 43 Reference, MergeResponse, MergeFailureReason)
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 CommitDoesNotExistError, EmptyRepositoryError)
46 46 from rhodecode.model import BaseModel
47 47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 48 from rhodecode.model.comment import ChangesetCommentsModel
49 49 from rhodecode.model.db import (
50 50 PullRequest, PullRequestReviewers, Notification, ChangesetStatus,
51 51 PullRequestVersion, ChangesetComment)
52 52 from rhodecode.model.meta import Session
53 53 from rhodecode.model.notification import NotificationModel, \
54 54 EmailNotificationModel
55 55 from rhodecode.model.scm import ScmModel
56 56 from rhodecode.model.settings import VcsSettingsModel
57 57
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61
62 62 class PullRequestModel(BaseModel):
63 63
64 64 cls = PullRequest
65 65
66 66 DIFF_CONTEXT = 3
67 67
68 68 MERGE_STATUS_MESSAGES = {
69 69 MergeFailureReason.NONE: lazy_ugettext(
70 70 'This pull request can be automatically merged.'),
71 71 MergeFailureReason.UNKNOWN: lazy_ugettext(
72 72 'This pull request cannot be merged because of an unhandled'
73 73 ' exception.'),
74 74 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
75 75 'This pull request cannot be merged because of conflicts.'),
76 76 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
77 77 'This pull request could not be merged because push to target'
78 78 ' failed.'),
79 79 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
80 80 'This pull request cannot be merged because the target is not a'
81 81 ' head.'),
82 82 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
83 83 'This pull request cannot be merged because the source contains'
84 84 ' more branches than the target.'),
85 85 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
86 86 'This pull request cannot be merged because the target has'
87 87 ' multiple heads.'),
88 88 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
89 89 'This pull request cannot be merged because the target repository'
90 90 ' is locked.'),
91 91 MergeFailureReason.MISSING_COMMIT: lazy_ugettext(
92 92 'This pull request cannot be merged because the target or the '
93 93 'source reference is missing.'),
94 94 }
95 95
    def __get_pull_request(self, pull_request):
        """Resolve *pull_request* (id or instance) to a PullRequest instance."""
        return self._get_instance(PullRequest, pull_request)
98 98
99 99 def _check_perms(self, perms, pull_request, user, api=False):
100 100 if not api:
101 101 return h.HasRepoPermissionAny(*perms)(
102 102 user=user, repo_name=pull_request.target_repo.repo_name)
103 103 else:
104 104 return h.HasRepoPermissionAnyApi(*perms)(
105 105 user=user, repo_name=pull_request.target_repo.repo_name)
106 106
107 107 def check_user_read(self, pull_request, user, api=False):
108 108 _perms = ('repository.admin', 'repository.write', 'repository.read',)
109 109 return self._check_perms(_perms, pull_request, user, api)
110 110
111 111 def check_user_merge(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_update(self, pull_request, user, api=False):
116 116 owner = user.user_id == pull_request.user_id
117 117 return self.check_user_merge(pull_request, user, api) or owner
118 118
119 119 def check_user_change_status(self, pull_request, user, api=False):
120 120 reviewer = user.user_id in [x.user_id for x in
121 121 pull_request.reviewers]
122 122 return self.check_user_update(pull_request, user, api) or reviewer
123 123
    def get(self, pull_request):
        """Public accessor: resolve an id or instance to a PullRequest."""
        return self.__get_pull_request(pull_request)
126 126
127 127 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
128 128 opened_by=None, order_by=None,
129 129 order_dir='desc'):
130 130 repo = self._get_repo(repo_name)
131 131 q = PullRequest.query()
132 132 # source or target
133 133 if source:
134 134 q = q.filter(PullRequest.source_repo == repo)
135 135 else:
136 136 q = q.filter(PullRequest.target_repo == repo)
137 137
138 138 # closed,opened
139 139 if statuses:
140 140 q = q.filter(PullRequest.status.in_(statuses))
141 141
142 142 # opened by filter
143 143 if opened_by:
144 144 q = q.filter(PullRequest.user_id.in_(opened_by))
145 145
146 146 if order_by:
147 147 order_map = {
148 148 'name_raw': PullRequest.pull_request_id,
149 149 'title': PullRequest.title,
150 150 'updated_on_raw': PullRequest.updated_on
151 151 }
152 152 if order_dir == 'asc':
153 153 q = q.order_by(order_map[order_by].asc())
154 154 else:
155 155 q = q.order_by(order_map[order_by].desc())
156 156
157 157 return q
158 158
159 159 def count_all(self, repo_name, source=False, statuses=None,
160 160 opened_by=None):
161 161 """
162 162 Count the number of pull requests for a specific repository.
163 163
164 164 :param repo_name: target or source repo
165 165 :param source: boolean flag to specify if repo_name refers to source
166 166 :param statuses: list of pull request statuses
167 167 :param opened_by: author user of the pull request
168 168 :returns: int number of pull requests
169 169 """
170 170 q = self._prepare_get_all_query(
171 171 repo_name, source=source, statuses=statuses, opened_by=opened_by)
172 172
173 173 return q.count()
174 174
175 175 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
176 176 offset=0, length=None, order_by=None, order_dir='desc'):
177 177 """
178 178 Get all pull requests for a specific repository.
179 179
180 180 :param repo_name: target or source repo
181 181 :param source: boolean flag to specify if repo_name refers to source
182 182 :param statuses: list of pull request statuses
183 183 :param opened_by: author user of the pull request
184 184 :param offset: pagination offset
185 185 :param length: length of returned list
186 186 :param order_by: order of the returned list
187 187 :param order_dir: 'asc' or 'desc' ordering direction
188 188 :returns: list of pull requests
189 189 """
190 190 q = self._prepare_get_all_query(
191 191 repo_name, source=source, statuses=statuses, opened_by=opened_by,
192 192 order_by=order_by, order_dir=order_dir)
193 193
194 194 if length:
195 195 pull_requests = q.limit(length).offset(offset).all()
196 196 else:
197 197 pull_requests = q.all()
198 198
199 199 return pull_requests
200 200
201 201 def count_awaiting_review(self, repo_name, source=False, statuses=None,
202 202 opened_by=None):
203 203 """
204 204 Count the number of pull requests for a specific repository that are
205 205 awaiting review.
206 206
207 207 :param repo_name: target or source repo
208 208 :param source: boolean flag to specify if repo_name refers to source
209 209 :param statuses: list of pull request statuses
210 210 :param opened_by: author user of the pull request
211 211 :returns: int number of pull requests
212 212 """
213 213 pull_requests = self.get_awaiting_review(
214 214 repo_name, source=source, statuses=statuses, opened_by=opened_by)
215 215
216 216 return len(pull_requests)
217 217
218 218 def get_awaiting_review(self, repo_name, source=False, statuses=None,
219 219 opened_by=None, offset=0, length=None,
220 220 order_by=None, order_dir='desc'):
221 221 """
222 222 Get all pull requests for a specific repository that are awaiting
223 223 review.
224 224
225 225 :param repo_name: target or source repo
226 226 :param source: boolean flag to specify if repo_name refers to source
227 227 :param statuses: list of pull request statuses
228 228 :param opened_by: author user of the pull request
229 229 :param offset: pagination offset
230 230 :param length: length of returned list
231 231 :param order_by: order of the returned list
232 232 :param order_dir: 'asc' or 'desc' ordering direction
233 233 :returns: list of pull requests
234 234 """
235 235 pull_requests = self.get_all(
236 236 repo_name, source=source, statuses=statuses, opened_by=opened_by,
237 237 order_by=order_by, order_dir=order_dir)
238 238
239 239 _filtered_pull_requests = []
240 240 for pr in pull_requests:
241 241 status = pr.calculated_review_status()
242 242 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
243 243 ChangesetStatus.STATUS_UNDER_REVIEW]:
244 244 _filtered_pull_requests.append(pr)
245 245 if length:
246 246 return _filtered_pull_requests[offset:offset+length]
247 247 else:
248 248 return _filtered_pull_requests
249 249
250 250 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
251 251 opened_by=None, user_id=None):
252 252 """
253 253 Count the number of pull requests for a specific repository that are
254 254 awaiting review from a specific user.
255 255
256 256 :param repo_name: target or source repo
257 257 :param source: boolean flag to specify if repo_name refers to source
258 258 :param statuses: list of pull request statuses
259 259 :param opened_by: author user of the pull request
260 260 :param user_id: reviewer user of the pull request
261 261 :returns: int number of pull requests
262 262 """
263 263 pull_requests = self.get_awaiting_my_review(
264 264 repo_name, source=source, statuses=statuses, opened_by=opened_by,
265 265 user_id=user_id)
266 266
267 267 return len(pull_requests)
268 268
269 269 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
270 270 opened_by=None, user_id=None, offset=0,
271 271 length=None, order_by=None, order_dir='desc'):
272 272 """
273 273 Get all pull requests for a specific repository that are awaiting
274 274 review from a specific user.
275 275
276 276 :param repo_name: target or source repo
277 277 :param source: boolean flag to specify if repo_name refers to source
278 278 :param statuses: list of pull request statuses
279 279 :param opened_by: author user of the pull request
280 280 :param user_id: reviewer user of the pull request
281 281 :param offset: pagination offset
282 282 :param length: length of returned list
283 283 :param order_by: order of the returned list
284 284 :param order_dir: 'asc' or 'desc' ordering direction
285 285 :returns: list of pull requests
286 286 """
287 287 pull_requests = self.get_all(
288 288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 289 order_by=order_by, order_dir=order_dir)
290 290
291 291 _my = PullRequestModel().get_not_reviewed(user_id)
292 292 my_participation = []
293 293 for pr in pull_requests:
294 294 if pr in _my:
295 295 my_participation.append(pr)
296 296 _filtered_pull_requests = my_participation
297 297 if length:
298 298 return _filtered_pull_requests[offset:offset+length]
299 299 else:
300 300 return _filtered_pull_requests
301 301
302 302 def get_not_reviewed(self, user_id):
303 303 return [
304 304 x.pull_request for x in PullRequestReviewers.query().filter(
305 305 PullRequestReviewers.user_id == user_id).all()
306 306 ]
307 307
    def get_versions(self, pull_request):
        """
        returns versions of pull request ordered by version id ascending
        (oldest snapshot first)

        NOTE(review): previous docstring said "descending", but the query
        orders by pull_request_version_id.asc() — confirm which callers
        expect before changing either.
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
316 316
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None):
        """
        Create a new pull request, assign its reviewers, mark its commits
        "Under Review", notify the reviewers and fire the 'create' hook.

        :param created_by: user id or User instance of the author
        :param source_repo: repo (name/id/instance) the changes come from
        :param source_ref: source reference string
        :param target_repo: repo the changes should be merged into
        :param target_ref: target reference string
        :param revisions: list of commit ids included in the pull request
        :param reviewers: iterable of reviewer user ids
        :param title: pull request title
        :param description: optional long description
        :returns: the new PullRequest instance (flushed, not committed)
        """
        created_by_user = self._get_user(created_by)
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.author = created_by_user

        Session().add(pull_request)
        # flush so the pull request has an id before reviewers reference it
        Session().flush()

        # members / reviewers
        for user_id in set(reviewers):
            user = self._get_user(user_id)
            reviewer = PullRequestReviewers(user, pull_request)
            Session().add(reviewer)

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )

        self.notify_reviewers(pull_request, reviewers)
        self._trigger_pull_request_hook(
            pull_request, created_by_user, 'create')

        return pull_request
356 356
357 357 def _trigger_pull_request_hook(self, pull_request, user, action):
358 358 pull_request = self.__get_pull_request(pull_request)
359 359 target_scm = pull_request.target_repo.scm_instance()
360 360 if action == 'create':
361 361 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
362 362 elif action == 'merge':
363 363 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
364 364 elif action == 'close':
365 365 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
366 366 elif action == 'review_status_change':
367 367 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
368 368 elif action == 'update':
369 369 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
370 370 else:
371 371 return
372 372
373 373 trigger_hook(
374 374 username=user.username,
375 375 repo_name=pull_request.target_repo.repo_name,
376 376 repo_alias=target_scm.alias,
377 377 pull_request=pull_request)
378 378
379 379 def _get_commit_ids(self, pull_request):
380 380 """
381 381 Return the commit ids of the merged pull request.
382 382
383 383 This method is not dealing correctly yet with the lack of autoupdates
384 384 nor with the implicit target updates.
385 385 For example: if a commit in the source repo is already in the target it
386 386 will be reported anyways.
387 387 """
388 388 merge_rev = pull_request.merge_rev
389 389 if merge_rev is None:
390 390 raise ValueError('This pull request was not merged yet')
391 391
392 392 commit_ids = list(pull_request.revisions)
393 393 if merge_rev not in commit_ids:
394 394 commit_ids.append(merge_rev)
395 395
396 396 return commit_ids
397 397
    def merge(self, pull_request, user, extras):
        """
        Merge the pull request into its target repository.

        On success the pull request is commented upon and closed and the
        action is logged; on failure the pull request is left untouched.
        Returns the merge state object from the VCS backend.
        """
        log.debug("Merging pull request %s", pull_request.pull_request_id)
        merge_state = self._merge_pull_request(pull_request, user, extras)
        if merge_state.executed:
            log.debug(
                "Merge was successful, updating the pull request comments.")
            self._comment_and_close_pr(pull_request, user, merge_state)
            self._log_action('user_merged_pull_request', user, pull_request)
        else:
            log.warn("Merge failed, not updating the pull request.")
        return merge_state
409 409
    def _merge_pull_request(self, pull_request, user, extras):
        """
        Perform the VCS-level merge of source into target, with the hooks
        callback daemon running so RhodeCode hooks fire for the resulting
        changes.  Returns the backend's merge state.
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()
        # make sure the target reference reflects the current repo state
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        message = _(
            'Merge pull request #%(pr_id)s from '
            '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
            'pr_id': pull_request.pull_request_id,
            'source_repo': source_vcs.name,
            'source_ref_name': pull_request.source_ref_parts.name,
            'pr_title': pull_request.title
        }

        workspace_id = self._workspace_id(pull_request)
        protocol = rhodecode.CONFIG.get('vcs.hooks.protocol')
        use_direct_calls = rhodecode.CONFIG.get('vcs.hooks.direct_calls')
        use_rebase = self._use_rebase_for_merging(pull_request)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=protocol, use_direct_calls=use_direct_calls)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
            merge_state = target_vcs.merge(
                target_ref, source_vcs, pull_request.source_ref_parts,
                workspace_id, user_name=user.username,
                user_email=user.email, message=message, use_rebase=use_rebase)
        return merge_state
443 443
    def _comment_and_close_pr(self, pull_request, user, merge_state):
        """
        After a successful merge: record the merge commit on the pull
        request, add a closing comment, invalidate the target repo caches
        and fire the 'merge' hook.
        """
        pull_request.merge_rev = merge_state.merge_commit_id
        pull_request.updated_on = datetime.datetime.now()

        # closing_pr=True makes this comment also close the pull request
        ChangesetCommentsModel().create(
            text=unicode(_('Pull request merged and closed')),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self._trigger_pull_request_hook(pull_request, user, 'merge')
464 464
465 465 def has_valid_update_type(self, pull_request):
466 466 source_ref_type = pull_request.source_ref_parts.type
467 467 return source_ref_type in ['book', 'branch', 'tag']
468 468
    def update_commits(self, pull_request):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action

        :return: tuple of (pull_request_version, changes), or (None, None)
            when no update was performed
        """

        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        # only symbolic refs (book/branch/tag) can move; anything else
        # can never produce new commits
        if not self.has_valid_update_type(pull_request):
            log.debug(
                "Skipping update of pull request %s due to ref type: %s",
                pull_request, source_ref_type)
            return (None, None)

        source_repo = pull_request.source_repo.scm_instance()
        source_commit = source_repo.get_commit(commit_id=source_ref_name)
        if source_ref_id == source_commit.raw_id:
            # the ref still points at the recorded commit -> nothing to do
            log.debug("Nothing changed in pull request %s", pull_request)
            return (None, None)

        # Finally there is a need for an update
        # snapshot the current state so existing comments stay attached
        # to the old version
        pull_request_version = self._create_version_from_snapshot(pull_request)
        self._link_comments_to_version(pull_request_version)

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id
        target_repo = pull_request.target_repo.scm_instance()

        # symbolic target refs are re-resolved by name, otherwise the
        # stored commit id is used directly
        if target_ref_type in ('tag', 'branch', 'book'):
            target_commit = target_repo.get_commit(target_ref_name)
        else:
            target_commit = target_repo.get_commit(target_ref_id)

        # re-compute commit ids
        old_commit_ids = set(pull_request.revisions)
        pre_load = ["author", "branch", "date", "message"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        ancestor = target_repo.get_common_ancestor(
            target_commit.raw_id, source_commit.raw_id, source_repo)

        # refresh the stored refs; target ref is pinned to the ancestor
        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor)
        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = set(pull_request.revisions)

        changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # mark comments placed on hunks that changed as outdated
        ChangesetCommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # Add an automatic comment to the pull request
        update_comment = ChangesetCommentsModel().create(
            text=self._render_update_message(changes, file_changes),
            repo=pull_request.target_repo,
            user=pull_request.author,
            pull_request=pull_request,
            send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

        # Update status to "Under Review" for added commits
        for commit_id in changes.added:
            ChangesetStatusModel().set_status(
                repo=pull_request.source_repo,
                status=ChangesetStatus.STATUS_UNDER_REVIEW,
                comment=update_comment,
                user=pull_request.author,
                pull_request=pull_request,
                revision=commit_id)

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            changes.added, changes.common, changes.removed)
        log.debug('Updated pull request with the following file changes: %s',
                  file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)
        Session().commit()
        self._trigger_pull_request_hook(pull_request, pull_request.author,
                                        'update')
        return (pull_request_version, changes)
575 575
    def _create_version_from_snapshot(self, pull_request):
        """
        Snapshot the current state of `pull_request` into a new
        `PullRequestVersion` row and flush it so it receives an id.

        :return: the newly created `PullRequestVersion`
        """
        version = PullRequestVersion()
        version.title = pull_request.title
        version.description = pull_request.description
        version.status = pull_request.status
        version.created_on = pull_request.created_on
        version.updated_on = pull_request.updated_on
        version.user_id = pull_request.user_id
        version.source_repo = pull_request.source_repo
        version.source_ref = pull_request.source_ref
        version.target_repo = pull_request.target_repo
        version.target_ref = pull_request.target_ref

        # also preserve the cached merge-state fields
        version._last_merge_source_rev = pull_request._last_merge_source_rev
        version._last_merge_target_rev = pull_request._last_merge_target_rev
        version._last_merge_status = pull_request._last_merge_status
        version.merge_rev = pull_request.merge_rev

        version.revisions = pull_request.revisions
        version.pull_request = pull_request
        Session().add(version)
        # flush so version.pull_request_version_id is populated for callers
        Session().flush()

        return version
600 600
    def _generate_update_diffs(self, pull_request, pull_request_version):
        """
        Build prepared `DiffProcessor` objects for the previous version
        and the current pull request.

        :return: tuple of (old_diff_data, new_diff_data)
        """
        # extra context lines are needed so comments can be matched
        # against the diff when outdating them
        diff_context = (
            self.DIFF_CONTEXT +
            ChangesetCommentsModel.needed_extra_diff_context())
        old_diff = self._get_diff_from_pr_or_version(
            pull_request_version, context=diff_context)
        new_diff = self._get_diff_from_pr_or_version(
            pull_request, context=diff_context)

        old_diff_data = diffs.DiffProcessor(old_diff)
        old_diff_data.prepare()
        new_diff_data = diffs.DiffProcessor(new_diff)
        new_diff_data.prepare()

        return old_diff_data, new_diff_data
616 616
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query().filter(
            # TODO: johbo: Should we query for the repo at all here?
            # Pending decision on how comments of PRs are to be related
            # to either the source repo, the target repo or no repo at all.
            ChangesetComment.repo_id == pull_request.target_repo.repo_id,
            ChangesetComment.pull_request == pull_request,
            # `== None` is intentional: SQLAlchemy IS NULL comparison
            ChangesetComment.pull_request_version == None)

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
640 640
641 641 def _calculate_commit_id_changes(self, old_ids, new_ids):
642 642 added = new_ids.difference(old_ids)
643 643 common = old_ids.intersection(new_ids)
644 644 removed = old_ids.difference(new_ids)
645 645 return ChangeTuple(added, common, removed)
646 646
647 647 def _calculate_file_changes(self, old_diff_data, new_diff_data):
648 648
649 649 old_files = OrderedDict()
650 650 for diff_data in old_diff_data.parsed_diff:
651 651 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
652 652
653 653 added_files = []
654 654 modified_files = []
655 655 removed_files = []
656 656 for diff_data in new_diff_data.parsed_diff:
657 657 new_filename = diff_data['filename']
658 658 new_hash = md5_safe(diff_data['raw_diff'])
659 659
660 660 old_hash = old_files.get(new_filename)
661 661 if not old_hash:
662 662 # file is not present in old diff, means it's added
663 663 added_files.append(new_filename)
664 664 else:
665 665 if new_hash != old_hash:
666 666 modified_files.append(new_filename)
667 667 # now remove a file from old, since we have seen it already
668 668 del old_files[new_filename]
669 669
670 670 # removed files is when there are present in old, but not in NEW,
671 671 # since we remove old files that are present in new diff, left-overs
672 672 # if any should be the removed files
673 673 removed_files.extend(old_files.keys())
674 674
675 675 return FileChangeTuple(added_files, modified_files, removed_files)
676 676
    def _render_update_message(self, changes, file_changes):
        """
        render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
        so it's always looking the same disregarding on which default
        renderer system is using.

        :param changes: changes named tuple
        :param file_changes: file changes named tuple

        """
        new_status = ChangesetStatus.get_status_lbl(
            ChangesetStatus.STATUS_UNDER_REVIEW)

        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        params = {
            'under_review_label': new_status,
            'added_commits': changes.added,
            'removed_commits': changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
        }
        # a fixed RST template keeps the comment independent of the
        # instance-configured default renderer
        renderer = RstTemplateRenderer()
        return renderer.render('pull_request_update.mako', **params)
704 704
    def edit(self, pull_request, title, description):
        """
        Update title and description of an open pull request.

        :raises ValueError: if the pull request is already closed
        """
        pull_request = self.__get_pull_request(pull_request)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')
        # an empty/falsy title is ignored; description is always overwritten
        if title:
            pull_request.title = title
        pull_request.description = description
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
714 714
    def update_reviewers(self, pull_request, reviewers_ids):
        """
        Sync the reviewer list of `pull_request` to exactly
        `reviewers_ids`, notifying newly added reviewers.

        :return: tuple of (ids_to_add, ids_to_remove) user-id sets
        """
        reviewers_ids = set(reviewers_ids)
        pull_request = self.__get_pull_request(pull_request)
        current_reviewers = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request ==
                    pull_request).all()
        current_reviewers_ids = set([x.user.user_id for x in current_reviewers])

        ids_to_add = reviewers_ids.difference(current_reviewers_ids)
        ids_to_remove = current_reviewers_ids.difference(reviewers_ids)

        log.debug("Adding %s reviewers", ids_to_add)
        log.debug("Removing %s reviewers", ids_to_remove)
        changed = False
        for uid in ids_to_add:
            changed = True
            _usr = self._get_user(uid)
            reviewer = PullRequestReviewers(_usr, pull_request)
            Session().add(reviewer)

        # only newly added reviewers get notified
        self.notify_reviewers(pull_request, ids_to_add)

        for uid in ids_to_remove:
            changed = True
            reviewer = PullRequestReviewers.query()\
                .filter(PullRequestReviewers.user_id == uid,
                        PullRequestReviewers.pull_request == pull_request)\
                .scalar()
            if reviewer:
                Session().delete(reviewer)
        if changed:
            # bump updated_on so the change shows up in listings
            pull_request.updated_on = datetime.datetime.now()
            Session().add(pull_request)

        return ids_to_add, ids_to_remove
750 750
751 def get_url(self, pull_request):
752 return url('pullrequest_show', repo_name=self.target_repo.repo_name,
753 pull_request_id=self.pull_request_id,
754 qualified=True)
755
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create in-app notifications and emails informing `reviewers_ids`
        about the pull request. No-op when the id list is empty.
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        # qualified (absolute) URLs, since these end up in emails
        pr_url = h.url(
            'pullrequest_show',
            repo_name=pr_target_repo.repo_name,
            pull_request_id=pull_request_obj.pull_request_id,
            qualified=True,)

        # set some variables for email notification
        pr_target_repo_url = h.url(
            'summary_home',
            repo_name=pr_target_repo.repo_name,
            qualified=True)

        pr_source_repo_url = h.url(
            'summary_home',
            repo_name=pr_source_repo.repo_name,
            qualified=True)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
815 820
    def delete(self, pull_request):
        """Delete the pull request and clean up its merge workspace."""
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        Session().delete(pull_request)
820 825
    def close_pull_request(self, pull_request, user):
        """
        Mark the pull request as closed, clean up its merge workspace,
        fire the 'close' hook and write an audit-log entry.
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self._trigger_pull_request_hook(
            pull_request, pull_request.author, 'close')
        self._log_action('user_closed_pull_request', user, pull_request)
830 835
    def close_pull_request_with_comment(self, pull_request, user, repo,
                                        message=None):
        """
        Close the pull request, leaving a closing comment and setting a
        'rejected' changeset status on it.

        :param message: optional text appended to the closing comment;
            defaults to a generic status-change message
        """
        status = ChangesetStatus.STATUS_REJECTED

        if not message:
            message = (
                _('Status change %(transition_icon)s %(status)s') % {
                    'transition_icon': '>',
                    'status': ChangesetStatus.get_status_lbl(status)})

        internal_message = _('Closing with') + ' ' + message

        comm = ChangesetCommentsModel().create(
            text=internal_message,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            status_change=ChangesetStatus.get_status_lbl(status),
            closing_pr=True
        )

        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comm,
            pull_request=pull_request.pull_request_id
        )
        # flush so comment and status exist before the close below
        Session().flush()

        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)
865 870
    def merge_status(self, pull_request):
        """
        Return a (merge_possible, message) tuple describing whether the
        pull request can currently be merged server-side.
        """
        if not self._is_merge_enabled(pull_request):
            return False, _('Server-side pull request merging is disabled.')
        if pull_request.is_closed():
            return False, _('This pull request is closed.')
        # repos must have compatible requirements (e.g. largefiles)
        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo)
        if not merge_possible:
            return merge_possible, msg

        try:
            # dry-run merge (possibly cached) to get the current state
            resp = self._try_merge(pull_request)
            status = resp.possible, self.merge_status_message(
                resp.failure_reason)
        except NotImplementedError:
            status = False, _('Pull request merging is not supported.')

        return status
884 889
885 890 def _check_repo_requirements(self, target, source):
886 891 """
887 892 Check if `target` and `source` have compatible requirements.
888 893
889 894 Currently this is just checking for largefiles.
890 895 """
891 896 target_has_largefiles = self._has_largefiles(target)
892 897 source_has_largefiles = self._has_largefiles(source)
893 898 merge_possible = True
894 899 message = u''
895 900
896 901 if target_has_largefiles != source_has_largefiles:
897 902 merge_possible = False
898 903 if source_has_largefiles:
899 904 message = _(
900 905 'Target repository large files support is disabled.')
901 906 else:
902 907 message = _(
903 908 'Source repository large files support is disabled.')
904 909
905 910 return merge_possible, message
906 911
907 912 def _has_largefiles(self, repo):
908 913 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
909 914 'extensions', 'largefiles')
910 915 return largefiles_ui and largefiles_ui[0].active
911 916
    def _try_merge(self, pull_request):
        """
        Try to merge the pull request and return the merge status.
        """
        log.debug(
            "Trying out if the pull request %s can be merged.",
            pull_request.pull_request_id)
        target_vcs = pull_request.target_repo.scm_instance()
        # resolve the target ref to its current commit id
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            log.debug("The target repository is locked.")
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
        elif self._needs_merge_state_refresh(pull_request, target_ref):
            # source or target moved since the last check: dry-run again
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached merge state is still valid; answer from it
            possible = pull_request.\
                _last_merge_status == MergeFailureReason.NONE
            merge_state = MergeResponse(
                possible, False, None, pull_request._last_merge_status)
        log.debug("Merge response: %s", merge_state)
        return merge_state
939 944
940 945 def _refresh_reference(self, reference, vcs_repository):
941 946 if reference.type in ('branch', 'book'):
942 947 name_or_id = reference.name
943 948 else:
944 949 name_or_id = reference.commit_id
945 950 refreshed_commit = vcs_repository.get_commit(name_or_id)
946 951 refreshed_reference = Reference(
947 952 reference.type, reference.name, refreshed_commit.raw_id)
948 953 return refreshed_reference
949 954
950 955 def _needs_merge_state_refresh(self, pull_request, target_reference):
951 956 return not(
952 957 pull_request.revisions and
953 958 pull_request.revisions[0] == pull_request._last_merge_source_rev and
954 959 target_reference.commit_id == pull_request._last_merge_target_rev)
955 960
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge and cache the outcome on the pull request,
        unless the failure reason was UNKNOWN.
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        use_rebase = self._use_rebase_for_merging(pull_request)
        merge_state = target_vcs.merge(
            target_reference, source_vcs, pull_request.source_ref_parts,
            workspace_id, dry_run=True, use_rebase=use_rebase)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = pull_request.\
                source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request._last_merge_status = (
                merge_state.failure_reason)
            Session().add(pull_request)
            Session().flush()

        return merge_state
975 980
976 981 def _workspace_id(self, pull_request):
977 982 workspace_id = 'pr-%s' % pull_request.pull_request_id
978 983 return workspace_id
979 984
    def merge_status_message(self, status_code):
        """
        Return a human friendly error message for the given merge status code.

        :param status_code: key into `MERGE_STATUS_MESSAGES`; callers pass
            a merge response's `failure_reason` — presumably a
            `MergeFailureReason` value (verify against that mapping)
        """
        return self.MERGE_STATUS_MESSAGES[status_code]
985 990
    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None):
        """
        Build a JSON-serializable summary of `repo` for pull request
        creation: owner info, description and the selectable refs
        (including a select2-shaped variant).
        """
        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark)

        # reshape refs into select2's {text, children:[{id, text}]} format
        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.firstname,
                'lastname': repo.user.lastname,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            # presumably chops the description at the first newline —
            # confirm against h.chop_at_smart
            'description': h.chop_at_smart(repo.description, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }
1013 1018
1014 1019 def generate_pullrequest_title(self, source, source_ref, target):
1015 1020 return '{source}#{at_ref} to {target}'.format(
1016 1021 source=source,
1017 1022 at_ref=source_ref,
1018 1023 target=target,
1019 1024 )
1020 1025
1021 1026 def _cleanup_merge_workspace(self, pull_request):
1022 1027 # Merging related cleanup
1023 1028 target_scm = pull_request.target_repo.scm_instance()
1024 1029 workspace_id = 'pr-%s' % pull_request.pull_request_id
1025 1030
1026 1031 try:
1027 1032 target_scm.cleanup_merge_workspace(workspace_id)
1028 1033 except NotImplementedError:
1029 1034 pass
1030 1035
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        """

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_str(branch) if branch else None
        bookmark = safe_str(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []
        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                # keys are '<type>:<name>:<commit_id>' triplets
                ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                # first ref matching the requested commit_id/name wins
                if not selected:
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            # a ref was explicitly requested but not found -> hard error;
            # otherwise fall back to default branch, then newest commit
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    'No commit refs could be found matching: %s' % ref)
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                selected = 'branch:%s:%s' % (
                    repo.DEFAULT_BRANCH_NAME,
                    repo.branches[repo.DEFAULT_BRANCH_NAME]
                )
            elif repo.commit_ids:
                rev = repo.commit_ids[0]
                selected = 'rev:%s:%s' % (rev, rev)
            else:
                raise EmptyRepositoryError()
        return groups, selected
1093 1098
    def get_diff(self, pull_request, context=DIFF_CONTEXT):
        """Return the vcs diff of the pull request with `context` lines."""
        pull_request = self.__get_pull_request(pull_request)
        return self._get_diff_from_pr_or_version(pull_request, context=context)
1097 1102
    def _get_diff_from_pr_or_version(self, pr_or_version, context):
        """
        Compute the diff between target and source refs of a pull request
        or one of its versions, using the source repo for both commits.

        :param pr_or_version: a `PullRequest` or `PullRequestVersion`
        :param context: number of context lines for the diff
        """
        source_repo = pr_or_version.source_repo

        # we swap org/other ref since we run a simple diff on one repo
        target_ref_id = pr_or_version.target_ref_parts.commit_id
        source_ref_id = pr_or_version.source_ref_parts.commit_id
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
        vcs_repo = source_repo.scm_instance()

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit, context=context)
        return vcs_diff
1125 1130
1126 1131 def _is_merge_enabled(self, pull_request):
1127 1132 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1128 1133 settings = settings_model.get_general_settings()
1129 1134 return settings.get('rhodecode_pr_merge_enabled', False)
1130 1135
1131 1136 def _use_rebase_for_merging(self, pull_request):
1132 1137 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1133 1138 settings = settings_model.get_general_settings()
1134 1139 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1135 1140
    def _log_action(self, action, user, pull_request):
        """Write an '<action>:<pr_id>' audit-log entry on the target repo."""
        action_logger(
            user,
            '{action}:{pr_id}'.format(
                action=action, pr_id=pull_request.pull_request_id),
            pull_request.target_repo)
1142 1147
1143 1148
# result containers for the commit-id and file-level change calculations
ChangeTuple = namedtuple('ChangeTuple', 'added common removed')

FileChangeTuple = namedtuple('FileChangeTuple', 'added modified removed')
@@ -1,931 +1,934 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Repository model for rhodecode
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28 import shutil
29 29 import time
30 30 import traceback
31 31 from datetime import datetime
32 32
33 33 from sqlalchemy.sql import func
34 34 from sqlalchemy.sql.expression import true, or_
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from rhodecode import events
38 38 from rhodecode.lib import helpers as h
39 39 from rhodecode.lib.auth import HasUserGroupPermissionAny
40 40 from rhodecode.lib.caching_query import FromCache
41 41 from rhodecode.lib.exceptions import AttachedForksError
42 42 from rhodecode.lib.hooks_base import log_delete_repository
43 43 from rhodecode.lib.utils import make_db_config
44 44 from rhodecode.lib.utils2 import (
45 45 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
46 46 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
47 47 from rhodecode.lib.vcs.backends import get_backend
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
51 51 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
52 52 RepoGroup, RepositoryField)
53 53 from rhodecode.model.scm import UserGroupList
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 class RepoModel(BaseModel):
61 61
62 62 cls = Repository
63 63
64 64 def _get_user_group(self, users_group):
65 65 return self._get_instance(UserGroup, users_group,
66 66 callback=UserGroup.get_by_group_name)
67 67
68 68 def _get_repo_group(self, repo_group):
69 69 return self._get_instance(RepoGroup, repo_group,
70 70 callback=RepoGroup.get_by_group_name)
71 71
72 72 def _create_default_perms(self, repository, private):
73 73 # create default permission
74 74 default = 'repository.read'
75 75 def_user = User.get_default_user()
76 76 for p in def_user.user_perms:
77 77 if p.permission.permission_name.startswith('repository.'):
78 78 default = p.permission.permission_name
79 79 break
80 80
81 81 default_perm = 'repository.none' if private else default
82 82
83 83 repo_to_perm = UserRepoToPerm()
84 84 repo_to_perm.permission = Permission.get_by_key(default_perm)
85 85
86 86 repo_to_perm.repository = repository
87 87 repo_to_perm.user_id = def_user.user_id
88 88
89 89 return repo_to_perm
90 90
91 91 @LazyProperty
92 92 def repos_path(self):
93 93 """
94 94 Gets the repositories root path from database
95 95 """
96 96 settings_model = VcsSettingsModel(sa=self.sa)
97 97 return settings_model.get_repos_location()
98 98
99 99 def get(self, repo_id, cache=False):
100 100 repo = self.sa.query(Repository) \
101 101 .filter(Repository.repo_id == repo_id)
102 102
103 103 if cache:
104 104 repo = repo.options(FromCache("sql_cache_short",
105 105 "get_repo_%s" % repo_id))
106 106 return repo.scalar()
107 107
    def get_repo(self, repository):
        """Resolve *repository* (instance, id or name) to a Repository object."""
        return self._get_repo(repository)
110 110
111 111 def get_by_repo_name(self, repo_name, cache=False):
112 112 repo = self.sa.query(Repository) \
113 113 .filter(Repository.repo_name == repo_name)
114 114
115 115 if cache:
116 116 repo = repo.options(FromCache("sql_cache_short",
117 117 "get_repo_%s" % repo_name))
118 118 return repo.scalar()
119 119
120 120 def _extract_id_from_repo_name(self, repo_name):
121 121 if repo_name.startswith('/'):
122 122 repo_name = repo_name.lstrip('/')
123 123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
124 124 if by_id_match:
125 125 return by_id_match.groups()[0]
126 126
127 127 def get_repo_by_id(self, repo_name):
128 128 """
129 129 Extracts repo_name by id from special urls.
130 130 Example url is _11/repo_name
131 131
132 132 :param repo_name:
133 133 :return: repo object if matched else None
134 134 """
135 135 try:
136 136 _repo_id = self._extract_id_from_repo_name(repo_name)
137 137 if _repo_id:
138 138 return self.get(_repo_id)
139 139 except Exception:
140 140 log.exception('Failed to extract repo_name from URL')
141 141
142 142 return None
143 143
144 def get_url(self, repo):
145 return url('summary_home', repo_name=repo.repo_name, qualified=True)
146
144 147 def get_users(self, name_contains=None, limit=20, only_active=True):
145 148 # TODO: mikhail: move this method to the UserModel.
146 149 query = self.sa.query(User)
147 150 if only_active:
148 151 query = query.filter(User.active == true())
149 152
150 153 if name_contains:
151 154 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
152 155 query = query.filter(
153 156 or_(
154 157 User.name.ilike(ilike_expression),
155 158 User.lastname.ilike(ilike_expression),
156 159 User.username.ilike(ilike_expression)
157 160 )
158 161 )
159 162 query = query.limit(limit)
160 163 users = query.all()
161 164
162 165 _users = [
163 166 {
164 167 'id': user.user_id,
165 168 'first_name': user.name,
166 169 'last_name': user.lastname,
167 170 'username': user.username,
168 171 'icon_link': h.gravatar_url(user.email, 14),
169 172 'value_display': h.person(user.email),
170 173 'value': user.username,
171 174 'value_type': 'user',
172 175 'active': user.active,
173 176 }
174 177 for user in users
175 178 ]
176 179 return _users
177 180
178 181 def get_user_groups(self, name_contains=None, limit=20, only_active=True):
179 182 # TODO: mikhail: move this method to the UserGroupModel.
180 183 query = self.sa.query(UserGroup)
181 184 if only_active:
182 185 query = query.filter(UserGroup.users_group_active == true())
183 186
184 187 if name_contains:
185 188 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
186 189 query = query.filter(
187 190 UserGroup.users_group_name.ilike(ilike_expression))\
188 191 .order_by(func.length(UserGroup.users_group_name))\
189 192 .order_by(UserGroup.users_group_name)
190 193
191 194 query = query.limit(limit)
192 195 user_groups = query.all()
193 196 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
194 197 user_groups = UserGroupList(user_groups, perm_set=perm_set)
195 198
196 199 _groups = [
197 200 {
198 201 'id': group.users_group_id,
199 202 # TODO: marcink figure out a way to generate the url for the
200 203 # icon
201 204 'icon_link': '',
202 205 'value_display': 'Group: %s (%d members)' % (
203 206 group.users_group_name, len(group.members),),
204 207 'value': group.users_group_name,
205 208 'value_type': 'user_group',
206 209 'active': group.users_group_active,
207 210 }
208 211 for group in user_groups
209 212 ]
210 213 return _groups
211 214
212 215 @classmethod
213 216 def update_repoinfo(cls, repositories=None):
214 217 if not repositories:
215 218 repositories = Repository.getAll()
216 219 for repo in repositories:
217 220 repo.update_commit_cache()
218 221
    def get_repos_as_dict(self, repo_list=None, admin=False,
                          super_user_actions=False):
        """
        Render *repo_list* into row dicts for the repositories data table.

        :param repo_list: iterable of Repository objects
        :param admin: include the admin "action" column in each row
        :param super_user_actions: render super-user variants of the actions
        :return: list of dicts, one per repository
        """
        from rhodecode.lib.utils import PartialRenderer
        _render = PartialRenderer('data_table/_dt_elements.html')
        c = _render.c

        # Cell renderers: each closure renders one table cell through the
        # data_table partial template.
        def quick_menu(repo_name):
            return _render('quick_menu', repo_name)

        def repo_lnk(name, rtype, rstate, private, fork_of):
            return _render('repo_name', name, rtype, rstate, private, fork_of,
                           short_name=not admin, admin=False)

        def last_change(last_change):
            return _render("last_change", last_change)

        def rss_lnk(repo_name):
            return _render("rss", repo_name)

        def atom_lnk(repo_name):
            return _render("atom", repo_name)

        def last_rev(repo_name, cs_cache):
            return _render('revision', repo_name, cs_cache.get('revision'),
                           cs_cache.get('raw_id'), cs_cache.get('author'),
                           cs_cache.get('message'))

        def desc(desc):
            # metatag styling depends on the visual settings toggle
            if c.visual.stylify_metatags:
                return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
            else:
                return h.urlify_text(h.html_escape(h.truncate(desc, 60)))

        def state(repo_state):
            return _render("repo_state", repo_state)

        def repo_actions(repo_name):
            return _render('repo_actions', repo_name, super_user_actions)

        def user_profile(username):
            return _render('user_profile', username)

        repos_data = []
        for repo in repo_list:
            cs_cache = repo.changeset_cache
            row = {
                "menu": quick_menu(repo.repo_name),

                "name": repo_lnk(repo.repo_name, repo.repo_type,
                                 repo.repo_state, repo.private, repo.fork),
                "name_raw": repo.repo_name.lower(),

                "last_change": last_change(repo.last_db_change),
                "last_change_raw": datetime_to_time(repo.last_db_change),

                "last_changeset": last_rev(repo.repo_name, cs_cache),
                "last_changeset_raw": cs_cache.get('revision'),

                "desc": desc(repo.description),
                "owner": user_profile(repo.user.username),

                "state": state(repo.repo_state),
                "rss": rss_lnk(repo.repo_name),

                "atom": atom_lnk(repo.repo_name),
            }
            # the action column is only rendered on admin views
            if admin:
                row.update({
                    "action": repo_actions(repo.repo_name),
                })
            repos_data.append(row)

        return repos_data
293 296
    def _get_defaults(self, repo_name):
        """
        Gets information about repository, and returns a dict for
        usage in forms

        :param repo_name: full repository name
        :return: dict of form defaults, or None when the repo is unknown
        """

        repo_info = Repository.get_by_repo_name(repo_name)

        if repo_info is None:
            return None

        defaults = repo_info.get_dict()
        defaults['repo_name'] = repo_info.just_name

        groups = repo_info.groups_with_parents
        parent_group = groups[-1] if groups else None

        # we use -1 as this is how in HTML, we mark an empty group
        defaults['repo_group'] = getattr(parent_group, 'group_id', -1)

        # form fields use a 'repo_' prefix; 'strip' marks keys whose db
        # attribute name lacks that prefix
        keys_to_process = (
            {'k': 'repo_type', 'strip': False},
            {'k': 'repo_enable_downloads', 'strip': True},
            {'k': 'repo_description', 'strip': True},
            {'k': 'repo_enable_locking', 'strip': True},
            {'k': 'repo_landing_rev', 'strip': True},
            {'k': 'clone_uri', 'strip': False},
            {'k': 'repo_private', 'strip': True},
            {'k': 'repo_enable_statistics', 'strip': True}
        )

        for item in keys_to_process:
            attr = item['k']
            if item['strip']:
                attr = remove_prefix(item['k'], 'repo_')

            val = defaults[attr]
            # landing_rev is stored as a (type, name) pair; forms expect
            # a single 'type:name' string
            if item['k'] == 'repo_landing_rev':
                val = ':'.join(defaults[attr])
            defaults[item['k']] = val
            if item['k'] == 'clone_uri':
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden

        # fill owner; fall back to the first super admin when unset
        if repo_info.user:
            defaults.update({'user': repo_info.user.username})
        else:
            replacement_user = User.get_first_super_admin().username
            defaults.update({'user': replacement_user})

        # fill repository users
        for p in repo_info.repo_to_perm:
            defaults.update({'u_perm_%s' % p.user.user_id:
                             p.permission.permission_name})

        # fill repository groups
        for p in repo_info.users_group_to_perm:
            defaults.update({'g_perm_%s' % p.users_group.users_group_id:
                             p.permission.permission_name})

        return defaults
357 360
    def update(self, repo, **kwargs):
        """
        Update an existing repository from form keyword arguments.

        Handles owner/group reassignment, prefixed form keys, the special
        clone_uri "keep old value" sentinel, renaming (db + filesystem),
        forced private permissions and extra fields.

        :param repo: Repository instance, id or name
        :return: the updated Repository
        :raises Exception: re-raises anything from the update after logging
        """
        try:
            cur_repo = self._get_repo(repo)
            source_repo_name = cur_repo.repo_name
            if 'user' in kwargs:
                cur_repo.user = User.get_by_username(kwargs['user'])

            if 'repo_group' in kwargs:
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)

            # (strip_flag, form_key): strip==1 drops the 'repo_' prefix to
            # get the db attribute name before setattr
            update_keys = [
                (1, 'repo_enable_downloads'),
                (1, 'repo_description'),
                (1, 'repo_enable_locking'),
                (1, 'repo_landing_rev'),
                (1, 'repo_private'),
                (1, 'repo_enable_statistics'),
                (0, 'clone_uri'),
                (0, 'fork_id')
            ]
            for strip, k in update_keys:
                if k in kwargs:
                    val = kwargs[k]
                    if strip:
                        k = remove_prefix(k, 'repo_')
                    if k == 'clone_uri':
                        from rhodecode.model.validators import Missing
                        _change = kwargs.get('clone_uri_change')
                        if _change in [Missing, 'OLD']:
                            # we don't change the value, so use original one
                            val = cur_repo.clone_uri

                    setattr(cur_repo, k, val)

            new_name = cur_repo.get_new_name(kwargs['repo_name'])
            cur_repo.repo_name = new_name

            # if private flag is set, reset default permission to NONE
            if kwargs.get('repo_private'):
                EMPTY_PERM = 'repository.none'
                RepoModel().grant_user_permission(
                    repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
                )

            # handle extra fields
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
                                kwargs):
                k = RepositoryField.un_prefix_key(field)
                ex_field = RepositoryField.get_by_key_name(
                    key=k, repo=cur_repo)
                if ex_field:
                    ex_field.field_value = kwargs[field]
                    self.sa.add(ex_field)
            self.sa.add(cur_repo)

            if source_repo_name != new_name:
                # rename repository
                self._rename_filesystem_repo(
                    old=source_repo_name, new=new_name)

            return cur_repo
        except Exception:
            log.error(traceback.format_exc())
            raise
423 426
    def _create_repo(self, repo_name, repo_type, description, owner,
                     private=False, clone_uri=None, repo_group=None,
                     landing_rev='rev:tip', fork_of=None,
                     copy_fork_permissions=False, enable_statistics=False,
                     enable_locking=False, enable_downloads=False,
                     copy_group_permissions=False,
                     state=Repository.STATE_PENDING):
        """
        Create repository inside database with PENDING state, this should be
        only executed by create() repo. With exception of importing existing
        repos

        Fires RepoPreCreateEvent before and RepoCreatedEvent after the
        database flush.  Permissions are copied from the fork parent or the
        parent group when requested; otherwise default permissions apply.

        :return: the new Repository (db) object
        :raises Exception: re-raises anything from creation after logging
        """
        from rhodecode.model.scm import ScmModel

        owner = self._get_user(owner)
        fork_of = self._get_repo(fork_of)
        repo_group = self._get_repo_group(safe_int(repo_group))

        try:
            repo_name = safe_unicode(repo_name)
            description = safe_unicode(description)
            # repo name is just a name of repository
            # while repo_name_full is a full qualified name that is combined
            # with name and path of group
            repo_name_full = repo_name
            repo_name = repo_name.split(Repository.NAME_SEP)[-1]

            new_repo = Repository()
            new_repo.repo_state = state
            new_repo.enable_statistics = False
            new_repo.repo_name = repo_name_full
            new_repo.repo_type = repo_type
            new_repo.user = owner
            new_repo.group = repo_group
            new_repo.description = description or repo_name
            new_repo.private = private
            new_repo.clone_uri = clone_uri
            new_repo.landing_rev = landing_rev

            new_repo.enable_statistics = enable_statistics
            new_repo.enable_locking = enable_locking
            new_repo.enable_downloads = enable_downloads

            # locking is inherited from the parent group when present
            if repo_group:
                new_repo.enable_locking = repo_group.enable_locking

            if fork_of:
                parent_repo = fork_of
                new_repo.fork = parent_repo

            events.trigger(events.RepoPreCreateEvent(new_repo))

            self.sa.add(new_repo)

            EMPTY_PERM = 'repository.none'
            if fork_of and copy_fork_permissions:
                # mirror all user and user-group permissions of the fork
                # parent onto the new repository
                repo = fork_of
                user_perms = UserRepoToPerm.query() \
                    .filter(UserRepoToPerm.repository == repo).all()
                group_perms = UserGroupRepoToPerm.query() \
                    .filter(UserGroupRepoToPerm.repository == repo).all()

                for perm in user_perms:
                    UserRepoToPerm.create(
                        perm.user, new_repo, perm.permission)

                for perm in group_perms:
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm.permission)
                # in case we copy permissions and also set this repo to private
                # override the default user permission to make it a private
                # repo
                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            elif repo_group and copy_group_permissions:
                # translate parent-group permissions (group.*) into the
                # matching repository.* permissions
                user_perms = UserRepoGroupToPerm.query() \
                    .filter(UserRepoGroupToPerm.group == repo_group).all()

                group_perms = UserGroupRepoGroupToPerm.query() \
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()

                for perm in user_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)

                for perm in group_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm_obj)

                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            else:
                perm_obj = self._create_default_perms(new_repo, private)
                self.sa.add(perm_obj)

            # now automatically start following this repository as owner
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
                                                    owner.user_id)

            # we need to flush here, in order to check if database won't
            # throw any exceptions, create filesystem dirs at the very end
            self.sa.flush()
            events.trigger(events.RepoCreatedEvent(new_repo))
            return new_repo

        except Exception:
            log.error(traceback.format_exc())
            raise
541 544
    def create(self, form_data, cur_user):
        """
        Create repository using celery tasks

        :param form_data: validated repository form data
        :param cur_user: user performing the creation
        :return: result of the (possibly async) celery task
        """
        from rhodecode.lib.celerylib import tasks, run_task
        return run_task(tasks.create_repo, form_data, cur_user)
551 554
    def update_permissions(self, repo, perm_additions=None, perm_updates=None,
                           perm_deletions=None, check_perms=True,
                           cur_user=None):
        """
        Apply batched permission changes to a repository.

        Each of *perm_additions*, *perm_updates* and *perm_deletions* is a
        list of ``(member_id, perm, member_type)`` tuples where member_type
        is 'user' or a user-group marker.  When *check_perms* is True,
        user-group changes require *cur_user* to hold a usergroup permission.
        """
        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        # update permissions
        for member_id, perm, member_type in perm_updates:
            member_id = int(member_id)
            if member_type == 'user':
                # this updates also current one if found
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            else:  # set for user group
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)

        # set new permissions
        for member_id, perm, member_type in perm_additions:
            member_id = int(member_id)
            if member_type == 'user':
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            else:  # set for user group
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)

        # delete permissions
        for member_id, perm, member_type in perm_deletions:
            member_id = int(member_id)
            if member_type == 'user':
                self.revoke_user_permission(repo=repo, user=member_id)
            else:  # set for user group
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.revoke_user_group_permission(
                        repo=repo, group_name=member_id)
605 608
    def create_fork(self, form_data, cur_user):
        """
        Simple wrapper into executing celery task for fork creation

        :param form_data: validated fork form data
        :param cur_user: user performing the fork
        :return: result of the (possibly async) celery task
        """
        from rhodecode.lib.celerylib import tasks, run_task
        return run_task(tasks.create_repo_fork, form_data, cur_user)
615 618
    def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
        """
        Delete given repository, forks parameter defines what do do with
        attached forks. Throws AttachedForksError if deleted repo has attached
        forks

        :param repo: Repository instance, id or name
        :param forks: str 'delete' or 'detach'; any other value raises
            AttachedForksError when forks exist
        :param fs_remove: remove(archive) repo from filesystem
        :param cur_user: user performing the deletion; defaults to the
            current rhodecode user
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)
        repo = self._get_repo(repo)
        if repo:
            if forks == 'detach':
                for r in repo.forks:
                    r.fork = None
                    self.sa.add(r)
            elif forks == 'delete':
                # recursive delete of every attached fork
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                raise AttachedForksError()

            # snapshot the dict before deletion for the journal entry
            old_repo_dict = repo.get_dict()
            events.trigger(events.RepoPreDeleteEvent(repo))
            try:
                self.sa.delete(repo)
                if fs_remove:
                    self._delete_filesystem_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                old_repo_dict.update({
                    'deleted_by': cur_user,
                    'deleted_on': time.time(),
                })
                log_delete_repository(**old_repo_dict)
                events.trigger(events.RepoDeletedEvent(repo))
            except Exception:
                log.error(traceback.format_exc())
                raise
657 660
658 661 def grant_user_permission(self, repo, user, perm):
659 662 """
660 663 Grant permission for user on given repository, or update existing one
661 664 if found
662 665
663 666 :param repo: Instance of Repository, repository_id, or repository name
664 667 :param user: Instance of User, user_id or username
665 668 :param perm: Instance of Permission, or permission_name
666 669 """
667 670 user = self._get_user(user)
668 671 repo = self._get_repo(repo)
669 672 permission = self._get_perm(perm)
670 673
671 674 # check if we have that permission already
672 675 obj = self.sa.query(UserRepoToPerm) \
673 676 .filter(UserRepoToPerm.user == user) \
674 677 .filter(UserRepoToPerm.repository == repo) \
675 678 .scalar()
676 679 if obj is None:
677 680 # create new !
678 681 obj = UserRepoToPerm()
679 682 obj.repository = repo
680 683 obj.user = user
681 684 obj.permission = permission
682 685 self.sa.add(obj)
683 686 log.debug('Granted perm %s to %s on %s', perm, user, repo)
684 687 action_logger_generic(
685 688 'granted permission: {} to user: {} on repo: {}'.format(
686 689 perm, user, repo), namespace='security.repo')
687 690 return obj
688 691
689 692 def revoke_user_permission(self, repo, user):
690 693 """
691 694 Revoke permission for user on given repository
692 695
693 696 :param repo: Instance of Repository, repository_id, or repository name
694 697 :param user: Instance of User, user_id or username
695 698 """
696 699
697 700 user = self._get_user(user)
698 701 repo = self._get_repo(repo)
699 702
700 703 obj = self.sa.query(UserRepoToPerm) \
701 704 .filter(UserRepoToPerm.repository == repo) \
702 705 .filter(UserRepoToPerm.user == user) \
703 706 .scalar()
704 707 if obj:
705 708 self.sa.delete(obj)
706 709 log.debug('Revoked perm on %s on %s', repo, user)
707 710 action_logger_generic(
708 711 'revoked permission from user: {} on repo: {}'.format(
709 712 user, repo), namespace='security.repo')
710 713
711 714 def grant_user_group_permission(self, repo, group_name, perm):
712 715 """
713 716 Grant permission for user group on given repository, or update
714 717 existing one if found
715 718
716 719 :param repo: Instance of Repository, repository_id, or repository name
717 720 :param group_name: Instance of UserGroup, users_group_id,
718 721 or user group name
719 722 :param perm: Instance of Permission, or permission_name
720 723 """
721 724 repo = self._get_repo(repo)
722 725 group_name = self._get_user_group(group_name)
723 726 permission = self._get_perm(perm)
724 727
725 728 # check if we have that permission already
726 729 obj = self.sa.query(UserGroupRepoToPerm) \
727 730 .filter(UserGroupRepoToPerm.users_group == group_name) \
728 731 .filter(UserGroupRepoToPerm.repository == repo) \
729 732 .scalar()
730 733
731 734 if obj is None:
732 735 # create new
733 736 obj = UserGroupRepoToPerm()
734 737
735 738 obj.repository = repo
736 739 obj.users_group = group_name
737 740 obj.permission = permission
738 741 self.sa.add(obj)
739 742 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
740 743 action_logger_generic(
741 744 'granted permission: {} to usergroup: {} on repo: {}'.format(
742 745 perm, group_name, repo), namespace='security.repo')
743 746
744 747 return obj
745 748
746 749 def revoke_user_group_permission(self, repo, group_name):
747 750 """
748 751 Revoke permission for user group on given repository
749 752
750 753 :param repo: Instance of Repository, repository_id, or repository name
751 754 :param group_name: Instance of UserGroup, users_group_id,
752 755 or user group name
753 756 """
754 757 repo = self._get_repo(repo)
755 758 group_name = self._get_user_group(group_name)
756 759
757 760 obj = self.sa.query(UserGroupRepoToPerm) \
758 761 .filter(UserGroupRepoToPerm.repository == repo) \
759 762 .filter(UserGroupRepoToPerm.users_group == group_name) \
760 763 .scalar()
761 764 if obj:
762 765 self.sa.delete(obj)
763 766 log.debug('Revoked perm to %s on %s', repo, group_name)
764 767 action_logger_generic(
765 768 'revoked permission from usergroup: {} on repo: {}'.format(
766 769 group_name, repo), namespace='security.repo')
767 770
768 771 def delete_stats(self, repo_name):
769 772 """
770 773 removes stats for given repo
771 774
772 775 :param repo_name:
773 776 """
774 777 repo = self._get_repo(repo_name)
775 778 try:
776 779 obj = self.sa.query(Statistics) \
777 780 .filter(Statistics.repository == repo).scalar()
778 781 if obj:
779 782 self.sa.delete(obj)
780 783 except Exception:
781 784 log.error(traceback.format_exc())
782 785 raise
783 786
784 787 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
785 788 field_type='str', field_desc=''):
786 789
787 790 repo = self._get_repo(repo_name)
788 791
789 792 new_field = RepositoryField()
790 793 new_field.repository = repo
791 794 new_field.field_key = field_key
792 795 new_field.field_type = field_type # python type
793 796 new_field.field_value = field_value
794 797 new_field.field_desc = field_desc
795 798 new_field.field_label = field_label
796 799 self.sa.add(new_field)
797 800 return new_field
798 801
799 802 def delete_repo_field(self, repo_name, field_key):
800 803 repo = self._get_repo(repo_name)
801 804 field = RepositoryField.get_by_key_name(field_key, repo)
802 805 if field:
803 806 self.sa.delete(field)
804 807
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
                                clone_uri=None, repo_store_location=None,
                                use_global_config=False):
        """
        makes repository on filesystem. It's group aware means it'll create
        a repository within a group, and alter the paths accordingly of
        group location

        :param repo_name: plain repository name, must not contain group
            separators
        :param repo_type: vcs backend alias ('git', 'hg', ...)
        :param repo_group: RepoGroup instance or group path string
        :param clone_uri: optional remote to clone from
        :param repo_store_location: override for the computed repo path
        :param use_global_config: skip per-repo config scoping
        :return: the created vcs backend repository
        """
        from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
        from rhodecode.model.scm import ScmModel

        if Repository.NAME_SEP in repo_name:
            raise ValueError(
                'repo_name must not contain groups got `%s`' % repo_name)

        if isinstance(repo_group, RepoGroup):
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
        else:
            new_parent_path = repo_group or ''

        if repo_store_location:
            _paths = [repo_store_location]
        else:
            _paths = [self.repos_path, new_parent_path, repo_name]
        # we need to make it str for mercurial
        repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))

        # check if this path is not a repository
        if is_valid_repo(repo_path, self.repos_path):
            raise Exception('This path %s is a valid repository' % repo_path)

        # check if this path is a group
        if is_valid_repo_group(repo_path, self.repos_path):
            raise Exception('This path %s is a valid group' % repo_path)

        log.info('creating repo %s in %s from url: `%s`',
                 repo_name, safe_unicode(repo_path),
                 obfuscate_url_pw(clone_uri))

        backend = get_backend(repo_type)

        # scope the db config to this repo (including group path) unless
        # global config was requested
        config_repo = None if use_global_config else repo_name
        if config_repo and new_parent_path:
            config_repo = Repository.NAME_SEP.join(
                (new_parent_path, config_repo))
        config = make_db_config(clear_session=False, repo=config_repo)
        config.set('extensions', 'largefiles', '')

        # patch and reset hooks section of UI config to not run any
        # hooks on creating remote repo
        config.clear_section('hooks')

        # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
        if repo_type == 'git':
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri,
                bare=True)
        else:
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri)

        ScmModel().install_hooks(repo, repo_type=repo_type)

        log.debug('Created repo %s with %s backend',
                  safe_unicode(repo_name), safe_unicode(repo_type))
        return repo
877 880
878 881 def _rename_filesystem_repo(self, old, new):
879 882 """
880 883 renames repository on filesystem
881 884
882 885 :param old: old name
883 886 :param new: new name
884 887 """
885 888 log.info('renaming repo from %s to %s', old, new)
886 889
887 890 old_path = os.path.join(self.repos_path, old)
888 891 new_path = os.path.join(self.repos_path, new)
889 892 if os.path.isdir(new_path):
890 893 raise Exception(
891 894 'Was trying to rename to already existing dir %s' % new_path
892 895 )
893 896 shutil.move(old_path, new_path)
894 897
    def _delete_filesystem_repo(self, repo):
        """
        removes repo from filesystem, the removal is acctually made by
        added rm__ prefix into dir, and rename internat .hg/.git dirs so this
        repository is no longer valid for rhodecode, can be undeleted later on
        by reverting the renames on this repository

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("Removing repository %s", rm_path)
        # disable hg/git internal that it doesn't get detected as repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        # archive name carries a timestamp (with microseconds) so repeated
        # deletes of a same-named repo never collide
        _now = datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo_group:
            # if repository is in group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))
@@ -1,249 +1,250 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Import early to make sure things are patched up properly
4 4 from setuptools import setup, find_packages
5 5
6 6 import os
7 7 import sys
8 8 import platform
9 9
10 10 if sys.version_info < (2, 7):
11 11 raise Exception('RhodeCode requires Python 2.7 or later')
12 12
13 13
14 14 here = os.path.abspath(os.path.dirname(__file__))
15 15
16 16
17 17 def _get_meta_var(name, data, callback_handler=None):
18 18 import re
19 19 matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data)
20 20 if matches:
21 21 if not callable(callback_handler):
22 22 callback_handler = lambda v: v
23 23
24 24 return callback_handler(eval(matches.groups()[0]))
25 25
# read package metadata straight out of rhodecode/__init__.py; use context
# managers so the file handles are always closed (the originals leaked them)
with open(os.path.join(here, 'rhodecode', '__init__.py'), 'rb') as _meta:
    _metadata = _meta.read()


def callback(V):
    # NOTE(review): this helper appears unused in this script; kept so the
    # module-level name does not disappear — confirm before removing
    return '.'.join(map(str, V[:3])) + '.'.join(V[3:])

with open(os.path.join('rhodecode', 'VERSION')) as _version_file:
    __version__ = _version_file.read().strip()
__license__ = _get_meta_var('__license__', _metadata)
__author__ = _get_meta_var('__author__', _metadata)
__url__ = _get_meta_var('__url__', _metadata)
# defines current platform
__platform__ = platform.system()

# Cygwin has different platform identifiers, but they all contain the
# term "CYGWIN"
is_windows = __platform__ == 'Windows' or 'CYGWIN' in __platform__
41 41
# runtime dependencies installed with the package
requirements = [
    'Babel',
    'Beaker',
    'FormEncode',
    'Mako',
    'Markdown',
    'MarkupSafe',
    'MySQL-python',
    'Paste',
    'PasteDeploy',
    'PasteScript',
    'Pygments',
    'Pylons',
    'Pyro4',
    'Routes',
    'SQLAlchemy',
    'Tempita',
    'URLObject',
    'WebError',
    'WebHelpers',
    'WebHelpers2',
    'WebOb',
    'WebTest',
    'Whoosh',
    'alembic',
    'amqplib',
    'anyjson',
    'appenlight-client',
    'authomatic',
    'backport_ipaddress',
    'celery',
    'colander',
    'decorator',
    'docutils',
    'gunicorn',
    'infrae.cache',
    'ipython',
    'iso8601',
    'kombu',
    'marshmallow',
    'msgpack-python',
    'packaging',
    'psycopg2',
    'py-gfm',
    'pycrypto',
    'pycurl',
    'pyparsing',
    'pyramid',
    'pyramid-debugtoolbar',
    'pyramid-mako',
    'pyramid-beaker',
    'pysqlite',
    'python-dateutil',
    'python-ldap',
    'python-memcached',
    'python-pam',
    'recaptcha-client',
    'repoze.lru',
    'requests',
    'simplejson',
    'waitress',
    'zope.cachedescriptors',
    'dogpile.cache',
    'dogpile.core',
]
106 107
# these two packages are only installed on non-Windows platforms; the
# original `if is_windows: pass / else:` shape was a no-op branch
if not is_windows:
    requirements.append('psutil')
    requirements.append('py-bcrypt')
# dependencies needed only to run the test suite
test_requirements = [
    'WebTest',
    'configobj',
    'cssselect',
    'flake8',
    'lxml',
    'mock',
    'pytest',
    'pytest-cov',
    'pytest-runner',
]

# dependencies needed at setup/egg-info time
setup_requirements = [
    'PasteScript',
    'pytest-runner',
]

dependency_links = []

# Trove classifiers describing the project on PyPI
classifiers = [
    'Development Status :: 6 - Mature',
    'Environment :: Web Environment',
    'Framework :: Pylons',
    'Intended Audience :: Developers',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2.7',
]


# additional files from project that goes somewhere in the filesystem
# relative to sys.prefix
data_files = []

# additional files that goes into package itself
package_data = {'rhodecode': ['i18n/*/LC_MESSAGES/*.mo']}

description = (
    'RhodeCode is a fast and powerful management tool '
    'for Mercurial and GIT with a built in push/pull server, '
    'full text search and code-review.')

keywords = ' '.join([
    'rhodecode', 'rhodiumcode', 'mercurial', 'git', 'code review',
    'repo groups', 'ldap', 'repository management', 'hgweb replacement',
    'hgwebdir', 'gitweb replacement', 'serving hgweb',
])
160 161
# long description: README + changelog when available, otherwise fall back
# to the short description
README_FILE = 'README.rst'
CHANGELOG_FILE = 'CHANGES.rst'
try:
    # context managers close the handles; the exception object was unused,
    # and `except IOError, err` is Python-2-only syntax
    with open(README_FILE) as _readme:
        long_description = _readme.read()
    with open(CHANGELOG_FILE) as _changelog:
        long_description += '\n\n' + _changelog.read()
except IOError:
    # a missing README/changelog is non-fatal: warn and degrade gracefully
    sys.stderr.write(
        '[WARNING] Cannot find file specified as long_description (%s)\n or '
        'changelog (%s) skipping that file' % (README_FILE, CHANGELOG_FILE)
    )
    long_description = description

# packages
packages = find_packages()
177 178
# paster command entry points exposed via PasteScript
paster_commands = [
    'make-config=rhodecode.lib.paster_commands.make_config:Command',
    'setup-rhodecode=rhodecode.lib.paster_commands.setup_rhodecode:Command',
    'update-repoinfo=rhodecode.lib.paster_commands.update_repoinfo:Command',
    'cache-keys=rhodecode.lib.paster_commands.cache_keys:Command',
    'ishell=rhodecode.lib.paster_commands.ishell:Command',
    'upgrade-db=rhodecode.lib.dbmigrate:UpgradeDb',
    'celeryd=rhodecode.lib.celerypylons.commands:CeleryDaemonCommand',
]
187 188
# hand everything to setuptools; all values above are plain data so the
# call itself is pure wiring
setup(
    name='rhodecode-enterprise-ce',
    version=__version__,
    description=description,
    long_description=long_description,
    keywords=keywords,
    license=__license__,
    author=__author__,
    author_email='marcin@rhodecode.com',
    dependency_links=dependency_links,
    url=__url__,
    install_requires=requirements,
    tests_require=test_requirements,
    classifiers=classifiers,
    setup_requires=setup_requirements,
    data_files=data_files,
    packages=packages,
    include_package_data=True,
    package_data=package_data,
    # where Babel looks for translatable strings
    message_extractors={
        'rhodecode': [
            ('**.py', 'python', None),
            ('**.js', 'javascript', None),
            ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
            ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}),
            ('public/**', 'ignore', None),
        ]
    },
    zip_safe=False,
    paster_plugins=['PasteScript', 'Pylons'],
    entry_points={
        # authentication plugin factories
        'enterprise.plugins1': [
            'crowd=rhodecode.authentication.plugins.auth_crowd:plugin_factory',
            'headers=rhodecode.authentication.plugins.auth_headers:plugin_factory',
            'jasig_cas=rhodecode.authentication.plugins.auth_jasig_cas:plugin_factory',
            'ldap=rhodecode.authentication.plugins.auth_ldap:plugin_factory',
            'pam=rhodecode.authentication.plugins.auth_pam:plugin_factory',
            'rhodecode=rhodecode.authentication.plugins.auth_rhodecode:plugin_factory',
            'token=rhodecode.authentication.plugins.auth_token:plugin_factory',
        ],
        # WSGI application factories
        'paste.app_factory': [
            'main=rhodecode.config.middleware:make_pyramid_app',
            'pylons=rhodecode.config.middleware:make_app',
        ],
        'paste.app_install': [
            'main=pylons.util:PylonsInstaller',
            'pylons=pylons.util:PylonsInstaller',
        ],
        'paste.global_paster_command': paster_commands,
        # pytest plugins shipped with the package
        'pytest11': [
            'pylons=rhodecode.tests.pylons_plugin',
            'enterprise=rhodecode.tests.plugin',
        ],
        'console_scripts': [
            'rcserver=rhodecode.rcserver:main',
        ],
        'beaker.backends': [
            'memorylru_base=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerBase',
            'memorylru_debug=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerDebug',
        ],
    },
)
General Comments 0
You need to be logged in to leave comments. Login now