##// END OF EJS Templates
events: add serialization .to_dict() to events based on marshmallow
dan -
r379:a86e0931 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,69 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 from datetime import datetime
20 from marshmallow import Schema, fields
21 from pyramid.threadlocal import get_current_request
22 from rhodecode.lib.utils2 import AttributeDict
23
24
# Fallback "actor" used when an event is created outside of a web request
# (e.g. from a CLI script or a background worker), so acting_user always
# yields an object with a `username` attribute for serialization.
# NOTE(review): AttributeDict presumably exposes dict keys as attributes —
# confirm against rhodecode.lib.utils2.
SYSTEM_USER = AttributeDict(dict(
    username='__SYSTEM__'
))
28
29
class UserSchema(Schema):
    """
    Serialization schema for the user attached to an event.

    Only the username is exposed; `fields.Str` and `fields.String`
    are the same marshmallow field class.
    """
    username = fields.String()
35
36
class RhodecodeEventSchema(Schema):
    """
    Serialization schema shared by every rhodecode event.

    Mirrors the attributes exposed by RhodecodeEvent: the UTC creation
    time, the acting user (nested UserSchema) and the acting IP address.
    """
    utc_timestamp = fields.DateTime()
    acting_user = fields.Nested(UserSchema)
    acting_ip = fields.String()
44
45
class RhodecodeEvent(object):
    """
    Base class for all rhodecode events.

    On creation it captures the current pyramid request (if any) and a
    naive UTC timestamp; `as_dict()` serializes the event through the
    marshmallow schema referenced by `MarshmallowSchema`.
    """
    # Subclasses may override this with a schema exposing extra fields.
    MarshmallowSchema = RhodecodeEventSchema

    def __init__(self):
        # May be None when fired outside a request (CLI, background job).
        self.request = get_current_request()
        self.utc_timestamp = datetime.utcnow()

    @property
    def acting_user(self):
        """User that triggered the event, or SYSTEM_USER without a request."""
        request = self.request
        if not request:
            return SYSTEM_USER
        return request.user.get_instance()

    @property
    def acting_ip(self):
        """IP of the triggering user, or a placeholder without a request."""
        request = self.request
        if not request:
            return '<no ip available>'
        return request.user.ip_addr

    def as_dict(self):
        """Serialize this event via its marshmallow schema (2.x API)."""
        schema = self.MarshmallowSchema()
        # marshmallow 2.x: dump() returns a MarshalResult namedtuple;
        # .data holds the serialized dict.
        return schema.dump(self).data
@@ -1,1641 +1,1654 b''
1 1 {
2 2 Babel = super.buildPythonPackage {
3 3 name = "Babel-1.3";
4 4 buildInputs = with self; [];
5 5 doCheck = false;
6 6 propagatedBuildInputs = with self; [pytz];
7 7 src = fetchurl {
8 8 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
9 9 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
10 10 };
11 11 meta = {
12 12 license = [ pkgs.lib.licenses.bsdOriginal ];
13 13 };
14 14 };
15 15 Beaker = super.buildPythonPackage {
16 16 name = "Beaker-1.7.0";
17 17 buildInputs = with self; [];
18 18 doCheck = false;
19 19 propagatedBuildInputs = with self; [];
20 20 src = fetchurl {
21 21 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
22 22 md5 = "386be3f7fe427358881eee4622b428b3";
23 23 };
24 24 meta = {
25 25 license = [ pkgs.lib.licenses.bsdOriginal ];
26 26 };
27 27 };
28 28 CProfileV = super.buildPythonPackage {
29 29 name = "CProfileV-1.0.6";
30 30 buildInputs = with self; [];
31 31 doCheck = false;
32 32 propagatedBuildInputs = with self; [bottle];
33 33 src = fetchurl {
34 34 url = "https://pypi.python.org/packages/eb/df/983a0b6cfd3ac94abf023f5011cb04f33613ace196e33f53c86cf91850d5/CProfileV-1.0.6.tar.gz";
35 35 md5 = "08c7c242b6e64237bc53c5d13537e03d";
36 36 };
37 37 meta = {
38 38 license = [ pkgs.lib.licenses.mit ];
39 39 };
40 40 };
41 41 Fabric = super.buildPythonPackage {
42 42 name = "Fabric-1.10.0";
43 43 buildInputs = with self; [];
44 44 doCheck = false;
45 45 propagatedBuildInputs = with self; [paramiko];
46 46 src = fetchurl {
47 47 url = "https://pypi.python.org/packages/e3/5f/b6ebdb5241d5ec9eab582a5c8a01255c1107da396f849e538801d2fe64a5/Fabric-1.10.0.tar.gz";
48 48 md5 = "2cb96473387f0e7aa035210892352f4a";
49 49 };
50 50 meta = {
51 51 license = [ pkgs.lib.licenses.bsdOriginal ];
52 52 };
53 53 };
54 54 FormEncode = super.buildPythonPackage {
55 55 name = "FormEncode-1.2.4";
56 56 buildInputs = with self; [];
57 57 doCheck = false;
58 58 propagatedBuildInputs = with self; [];
59 59 src = fetchurl {
60 60 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
61 61 md5 = "6bc17fb9aed8aea198975e888e2077f4";
62 62 };
63 63 meta = {
64 64 license = [ pkgs.lib.licenses.psfl ];
65 65 };
66 66 };
67 67 Jinja2 = super.buildPythonPackage {
68 68 name = "Jinja2-2.7.3";
69 69 buildInputs = with self; [];
70 70 doCheck = false;
71 71 propagatedBuildInputs = with self; [MarkupSafe];
72 72 src = fetchurl {
73 73 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
74 74 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
75 75 };
76 76 meta = {
77 77 license = [ pkgs.lib.licenses.bsdOriginal ];
78 78 };
79 79 };
80 80 Mako = super.buildPythonPackage {
81 81 name = "Mako-1.0.1";
82 82 buildInputs = with self; [];
83 83 doCheck = false;
84 84 propagatedBuildInputs = with self; [MarkupSafe];
85 85 src = fetchurl {
86 86 url = "https://pypi.python.org/packages/8e/a4/aa56533ecaa5f22ca92428f74e074d0c9337282933c722391902c8f9e0f8/Mako-1.0.1.tar.gz";
87 87 md5 = "9f0aafd177b039ef67b90ea350497a54";
88 88 };
89 89 meta = {
90 90 license = [ pkgs.lib.licenses.mit ];
91 91 };
92 92 };
93 93 Markdown = super.buildPythonPackage {
94 94 name = "Markdown-2.6.2";
95 95 buildInputs = with self; [];
96 96 doCheck = false;
97 97 propagatedBuildInputs = with self; [];
98 98 src = fetchurl {
99 99 url = "https://pypi.python.org/packages/62/8b/83658b5f6c220d5fcde9f9852d46ea54765d734cfbc5a9f4c05bfc36db4d/Markdown-2.6.2.tar.gz";
100 100 md5 = "256d19afcc564dc4ce4c229bb762f7ae";
101 101 };
102 102 meta = {
103 103 license = [ pkgs.lib.licenses.bsdOriginal ];
104 104 };
105 105 };
106 106 MarkupSafe = super.buildPythonPackage {
107 107 name = "MarkupSafe-0.23";
108 108 buildInputs = with self; [];
109 109 doCheck = false;
110 110 propagatedBuildInputs = with self; [];
111 111 src = fetchurl {
112 112 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
113 113 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
114 114 };
115 115 meta = {
116 116 license = [ pkgs.lib.licenses.bsdOriginal ];
117 117 };
118 118 };
119 119 MySQL-python = super.buildPythonPackage {
120 120 name = "MySQL-python-1.2.5";
121 121 buildInputs = with self; [];
122 122 doCheck = false;
123 123 propagatedBuildInputs = with self; [];
124 124 src = fetchurl {
125 125 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
126 126 md5 = "654f75b302db6ed8dc5a898c625e030c";
127 127 };
128 128 meta = {
129 129 license = [ pkgs.lib.licenses.gpl1 ];
130 130 };
131 131 };
132 132 Paste = super.buildPythonPackage {
133 133 name = "Paste-2.0.2";
134 134 buildInputs = with self; [];
135 135 doCheck = false;
136 136 propagatedBuildInputs = with self; [six];
137 137 src = fetchurl {
138 138 url = "https://pypi.python.org/packages/d5/8d/0f8ac40687b97ff3e07ebd1369be20bdb3f93864d2dc3c2ff542edb4ce50/Paste-2.0.2.tar.gz";
139 139 md5 = "4bfc8a7eaf858f6309d2ac0f40fc951c";
140 140 };
141 141 meta = {
142 142 license = [ pkgs.lib.licenses.mit ];
143 143 };
144 144 };
145 145 PasteDeploy = super.buildPythonPackage {
146 146 name = "PasteDeploy-1.5.2";
147 147 buildInputs = with self; [];
148 148 doCheck = false;
149 149 propagatedBuildInputs = with self; [];
150 150 src = fetchurl {
151 151 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
152 152 md5 = "352b7205c78c8de4987578d19431af3b";
153 153 };
154 154 meta = {
155 155 license = [ pkgs.lib.licenses.mit ];
156 156 };
157 157 };
158 158 PasteScript = super.buildPythonPackage {
159 159 name = "PasteScript-1.7.5";
160 160 buildInputs = with self; [];
161 161 doCheck = false;
162 162 propagatedBuildInputs = with self; [Paste PasteDeploy];
163 163 src = fetchurl {
164 164 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
165 165 md5 = "4c72d78dcb6bb993f30536842c16af4d";
166 166 };
167 167 meta = {
168 168 license = [ pkgs.lib.licenses.mit ];
169 169 };
170 170 };
171 171 Pygments = super.buildPythonPackage {
172 172 name = "Pygments-2.1.3";
173 173 buildInputs = with self; [];
174 174 doCheck = false;
175 175 propagatedBuildInputs = with self; [];
176 176 src = fetchurl {
177 177 url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
178 178 md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
179 179 };
180 180 meta = {
181 181 license = [ pkgs.lib.licenses.bsdOriginal ];
182 182 };
183 183 };
184 184 Pylons = super.buildPythonPackage {
185 185 name = "Pylons-1.0.1";
186 186 buildInputs = with self; [];
187 187 doCheck = false;
188 188 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
189 189 src = fetchurl {
190 190 url = "https://pypi.python.org/packages/a2/69/b835a6bad00acbfeed3f33c6e44fa3f936efc998c795bfb15c61a79ecf62/Pylons-1.0.1.tar.gz";
191 191 md5 = "6cb880d75fa81213192142b07a6e4915";
192 192 };
193 193 meta = {
194 194 license = [ pkgs.lib.licenses.bsdOriginal ];
195 195 };
196 196 };
197 197 Pyro4 = super.buildPythonPackage {
198 198 name = "Pyro4-4.41";
199 199 buildInputs = with self; [];
200 200 doCheck = false;
201 201 propagatedBuildInputs = with self; [serpent];
202 202 src = fetchurl {
203 203 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
204 204 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
205 205 };
206 206 meta = {
207 207 license = [ pkgs.lib.licenses.mit ];
208 208 };
209 209 };
210 210 Routes = super.buildPythonPackage {
211 211 name = "Routes-1.13";
212 212 buildInputs = with self; [];
213 213 doCheck = false;
214 214 propagatedBuildInputs = with self; [repoze.lru];
215 215 src = fetchurl {
216 216 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
217 217 md5 = "d527b0ab7dd9172b1275a41f97448783";
218 218 };
219 219 meta = {
220 220 license = [ pkgs.lib.licenses.bsdOriginal ];
221 221 };
222 222 };
223 223 SQLAlchemy = super.buildPythonPackage {
224 224 name = "SQLAlchemy-0.9.9";
225 225 buildInputs = with self; [];
226 226 doCheck = false;
227 227 propagatedBuildInputs = with self; [];
228 228 src = fetchurl {
229 229 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
230 230 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
231 231 };
232 232 meta = {
233 233 license = [ pkgs.lib.licenses.mit ];
234 234 };
235 235 };
236 236 Sphinx = super.buildPythonPackage {
237 237 name = "Sphinx-1.2.2";
238 238 buildInputs = with self; [];
239 239 doCheck = false;
240 240 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
241 241 src = fetchurl {
242 242 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
243 243 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
244 244 };
245 245 meta = {
246 246 license = [ pkgs.lib.licenses.bsdOriginal ];
247 247 };
248 248 };
249 249 Tempita = super.buildPythonPackage {
250 250 name = "Tempita-0.5.2";
251 251 buildInputs = with self; [];
252 252 doCheck = false;
253 253 propagatedBuildInputs = with self; [];
254 254 src = fetchurl {
255 255 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
256 256 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
257 257 };
258 258 meta = {
259 259 license = [ pkgs.lib.licenses.mit ];
260 260 };
261 261 };
262 262 URLObject = super.buildPythonPackage {
263 263 name = "URLObject-2.4.0";
264 264 buildInputs = with self; [];
265 265 doCheck = false;
266 266 propagatedBuildInputs = with self; [];
267 267 src = fetchurl {
268 268 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
269 269 md5 = "2ed819738a9f0a3051f31dc9924e3065";
270 270 };
271 271 meta = {
272 272 license = [ ];
273 273 };
274 274 };
275 275 WebError = super.buildPythonPackage {
276 276 name = "WebError-0.10.3";
277 277 buildInputs = with self; [];
278 278 doCheck = false;
279 279 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
280 280 src = fetchurl {
281 281 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
282 282 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
283 283 };
284 284 meta = {
285 285 license = [ pkgs.lib.licenses.mit ];
286 286 };
287 287 };
288 288 WebHelpers = super.buildPythonPackage {
289 289 name = "WebHelpers-1.3";
290 290 buildInputs = with self; [];
291 291 doCheck = false;
292 292 propagatedBuildInputs = with self; [MarkupSafe];
293 293 src = fetchurl {
294 294 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
295 295 md5 = "32749ffadfc40fea51075a7def32588b";
296 296 };
297 297 meta = {
298 298 license = [ pkgs.lib.licenses.bsdOriginal ];
299 299 };
300 300 };
301 301 WebHelpers2 = super.buildPythonPackage {
302 302 name = "WebHelpers2-2.0";
303 303 buildInputs = with self; [];
304 304 doCheck = false;
305 305 propagatedBuildInputs = with self; [MarkupSafe six];
306 306 src = fetchurl {
307 307 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
308 308 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
309 309 };
310 310 meta = {
311 311 license = [ pkgs.lib.licenses.mit ];
312 312 };
313 313 };
314 314 WebOb = super.buildPythonPackage {
315 315 name = "WebOb-1.3.1";
316 316 buildInputs = with self; [];
317 317 doCheck = false;
318 318 propagatedBuildInputs = with self; [];
319 319 src = fetchurl {
320 320 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
321 321 md5 = "20918251c5726956ba8fef22d1556177";
322 322 };
323 323 meta = {
324 324 license = [ pkgs.lib.licenses.mit ];
325 325 };
326 326 };
327 327 WebTest = super.buildPythonPackage {
328 328 name = "WebTest-1.4.3";
329 329 buildInputs = with self; [];
330 330 doCheck = false;
331 331 propagatedBuildInputs = with self; [WebOb];
332 332 src = fetchurl {
333 333 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
334 334 md5 = "631ce728bed92c681a4020a36adbc353";
335 335 };
336 336 meta = {
337 337 license = [ pkgs.lib.licenses.mit ];
338 338 };
339 339 };
340 340 Whoosh = super.buildPythonPackage {
341 341 name = "Whoosh-2.7.0";
342 342 buildInputs = with self; [];
343 343 doCheck = false;
344 344 propagatedBuildInputs = with self; [];
345 345 src = fetchurl {
346 346 url = "https://pypi.python.org/packages/1c/dc/2f0231ff3875ded36df8c1ab851451e51a237dc0e5a86d3d96036158da94/Whoosh-2.7.0.zip";
347 347 md5 = "7abfd970f16fadc7311960f3fa0bc7a9";
348 348 };
349 349 meta = {
350 350 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
351 351 };
352 352 };
353 353 alembic = super.buildPythonPackage {
354 354 name = "alembic-0.8.4";
355 355 buildInputs = with self; [];
356 356 doCheck = false;
357 357 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor];
358 358 src = fetchurl {
359 359 url = "https://pypi.python.org/packages/ca/7e/299b4499b5c75e5a38c5845145ad24755bebfb8eec07a2e1c366b7181eeb/alembic-0.8.4.tar.gz";
360 360 md5 = "5f95d8ee62b443f9b37eb5bee76c582d";
361 361 };
362 362 meta = {
363 363 license = [ pkgs.lib.licenses.mit ];
364 364 };
365 365 };
366 366 amqplib = super.buildPythonPackage {
367 367 name = "amqplib-1.0.2";
368 368 buildInputs = with self; [];
369 369 doCheck = false;
370 370 propagatedBuildInputs = with self; [];
371 371 src = fetchurl {
372 372 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
373 373 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
374 374 };
375 375 meta = {
376 376 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
377 377 };
378 378 };
379 379 anyjson = super.buildPythonPackage {
380 380 name = "anyjson-0.3.3";
381 381 buildInputs = with self; [];
382 382 doCheck = false;
383 383 propagatedBuildInputs = with self; [];
384 384 src = fetchurl {
385 385 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
386 386 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
387 387 };
388 388 meta = {
389 389 license = [ pkgs.lib.licenses.bsdOriginal ];
390 390 };
391 391 };
392 392 appenlight-client = super.buildPythonPackage {
393 393 name = "appenlight-client-0.6.14";
394 394 buildInputs = with self; [];
395 395 doCheck = false;
396 396 propagatedBuildInputs = with self; [WebOb requests];
397 397 src = fetchurl {
398 398 url = "https://pypi.python.org/packages/4d/e0/23fee3ebada8143f707e65c06bcb82992040ee64ea8355e044ed55ebf0c1/appenlight_client-0.6.14.tar.gz";
399 399 md5 = "578c69b09f4356d898fff1199b98a95c";
400 400 };
401 401 meta = {
402 402 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
403 403 };
404 404 };
405 405 authomatic = super.buildPythonPackage {
406 406 name = "authomatic-0.1.0.post1";
407 407 buildInputs = with self; [];
408 408 doCheck = false;
409 409 propagatedBuildInputs = with self; [];
410 410 src = fetchurl {
411 411 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
412 412 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
413 413 };
414 414 meta = {
415 415 license = [ pkgs.lib.licenses.mit ];
416 416 };
417 417 };
418 418 backport-ipaddress = super.buildPythonPackage {
419 419 name = "backport-ipaddress-0.1";
420 420 buildInputs = with self; [];
421 421 doCheck = false;
422 422 propagatedBuildInputs = with self; [];
423 423 src = fetchurl {
424 424 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
425 425 md5 = "9c1f45f4361f71b124d7293a60006c05";
426 426 };
427 427 meta = {
428 428 license = [ pkgs.lib.licenses.psfl ];
429 429 };
430 430 };
431 431 bottle = super.buildPythonPackage {
432 432 name = "bottle-0.12.8";
433 433 buildInputs = with self; [];
434 434 doCheck = false;
435 435 propagatedBuildInputs = with self; [];
436 436 src = fetchurl {
437 437 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
438 438 md5 = "13132c0a8f607bf860810a6ee9064c5b";
439 439 };
440 440 meta = {
441 441 license = [ pkgs.lib.licenses.mit ];
442 442 };
443 443 };
444 444 bumpversion = super.buildPythonPackage {
445 445 name = "bumpversion-0.5.3";
446 446 buildInputs = with self; [];
447 447 doCheck = false;
448 448 propagatedBuildInputs = with self; [];
449 449 src = fetchurl {
450 450 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
451 451 md5 = "c66a3492eafcf5ad4b024be9fca29820";
452 452 };
453 453 meta = {
454 454 license = [ pkgs.lib.licenses.mit ];
455 455 };
456 456 };
457 457 celery = super.buildPythonPackage {
458 458 name = "celery-2.2.10";
459 459 buildInputs = with self; [];
460 460 doCheck = false;
461 461 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
462 462 src = fetchurl {
463 463 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
464 464 md5 = "898bc87e54f278055b561316ba73e222";
465 465 };
466 466 meta = {
467 467 license = [ pkgs.lib.licenses.bsdOriginal ];
468 468 };
469 469 };
470 470 click = super.buildPythonPackage {
471 471 name = "click-5.1";
472 472 buildInputs = with self; [];
473 473 doCheck = false;
474 474 propagatedBuildInputs = with self; [];
475 475 src = fetchurl {
476 476 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
477 477 md5 = "9c5323008cccfe232a8b161fc8196d41";
478 478 };
479 479 meta = {
480 480 license = [ pkgs.lib.licenses.bsdOriginal ];
481 481 };
482 482 };
483 483 colander = super.buildPythonPackage {
484 484 name = "colander-1.2";
485 485 buildInputs = with self; [];
486 486 doCheck = false;
487 487 propagatedBuildInputs = with self; [translationstring iso8601];
488 488 src = fetchurl {
489 489 url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz";
490 490 md5 = "83db21b07936a0726e588dae1914b9ed";
491 491 };
492 492 meta = {
493 493 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
494 494 };
495 495 };
496 496 configobj = super.buildPythonPackage {
497 497 name = "configobj-5.0.6";
498 498 buildInputs = with self; [];
499 499 doCheck = false;
500 500 propagatedBuildInputs = with self; [six];
501 501 src = fetchurl {
502 502 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
503 503 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
504 504 };
505 505 meta = {
506 506 license = [ pkgs.lib.licenses.bsdOriginal ];
507 507 };
508 508 };
509 509 cov-core = super.buildPythonPackage {
510 510 name = "cov-core-1.15.0";
511 511 buildInputs = with self; [];
512 512 doCheck = false;
513 513 propagatedBuildInputs = with self; [coverage];
514 514 src = fetchurl {
515 515 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
516 516 md5 = "f519d4cb4c4e52856afb14af52919fe6";
517 517 };
518 518 meta = {
519 519 license = [ pkgs.lib.licenses.mit ];
520 520 };
521 521 };
522 522 coverage = super.buildPythonPackage {
523 523 name = "coverage-3.7.1";
524 524 buildInputs = with self; [];
525 525 doCheck = false;
526 526 propagatedBuildInputs = with self; [];
527 527 src = fetchurl {
528 528 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
529 529 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
530 530 };
531 531 meta = {
532 532 license = [ pkgs.lib.licenses.bsdOriginal ];
533 533 };
534 534 };
535 535 cssselect = super.buildPythonPackage {
536 536 name = "cssselect-0.9.1";
537 537 buildInputs = with self; [];
538 538 doCheck = false;
539 539 propagatedBuildInputs = with self; [];
540 540 src = fetchurl {
541 541 url = "https://pypi.python.org/packages/aa/e5/9ee1460d485b94a6d55732eb7ad5b6c084caf73dd6f9cb0bb7d2a78fafe8/cssselect-0.9.1.tar.gz";
542 542 md5 = "c74f45966277dc7a0f768b9b0f3522ac";
543 543 };
544 544 meta = {
545 545 license = [ pkgs.lib.licenses.bsdOriginal ];
546 546 };
547 547 };
548 548 decorator = super.buildPythonPackage {
549 549 name = "decorator-3.4.2";
550 550 buildInputs = with self; [];
551 551 doCheck = false;
552 552 propagatedBuildInputs = with self; [];
553 553 src = fetchurl {
554 554 url = "https://pypi.python.org/packages/35/3a/42566eb7a2cbac774399871af04e11d7ae3fc2579e7dae85213b8d1d1c57/decorator-3.4.2.tar.gz";
555 555 md5 = "9e0536870d2b83ae27d58dbf22582f4d";
556 556 };
557 557 meta = {
558 558 license = [ pkgs.lib.licenses.bsdOriginal ];
559 559 };
560 560 };
561 561 docutils = super.buildPythonPackage {
562 562 name = "docutils-0.12";
563 563 buildInputs = with self; [];
564 564 doCheck = false;
565 565 propagatedBuildInputs = with self; [];
566 566 src = fetchurl {
567 567 url = "https://pypi.python.org/packages/37/38/ceda70135b9144d84884ae2fc5886c6baac4edea39550f28bcd144c1234d/docutils-0.12.tar.gz";
568 568 md5 = "4622263b62c5c771c03502afa3157768";
569 569 };
570 570 meta = {
571 571 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
572 572 };
573 573 };
574 574 dogpile.cache = super.buildPythonPackage {
575 575 name = "dogpile.cache-0.6.1";
576 576 buildInputs = with self; [];
577 577 doCheck = false;
578 578 propagatedBuildInputs = with self; [dogpile.core];
579 579 src = fetchurl {
580 580 url = "https://pypi.python.org/packages/f6/a0/6f2142c58c6588d17c734265b103ae1cd0741e1681dd9483a63f22033375/dogpile.cache-0.6.1.tar.gz";
581 581 md5 = "35d7fb30f22bbd0685763d894dd079a9";
582 582 };
583 583 meta = {
584 584 license = [ pkgs.lib.licenses.bsdOriginal ];
585 585 };
586 586 };
587 587 dogpile.core = super.buildPythonPackage {
588 588 name = "dogpile.core-0.4.1";
589 589 buildInputs = with self; [];
590 590 doCheck = false;
591 591 propagatedBuildInputs = with self; [];
592 592 src = fetchurl {
593 593 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
594 594 md5 = "01cb19f52bba3e95c9b560f39341f045";
595 595 };
596 596 meta = {
597 597 license = [ pkgs.lib.licenses.bsdOriginal ];
598 598 };
599 599 };
600 600 dulwich = super.buildPythonPackage {
601 601 name = "dulwich-0.12.0";
602 602 buildInputs = with self; [];
603 603 doCheck = false;
604 604 propagatedBuildInputs = with self; [];
605 605 src = fetchurl {
606 606 url = "https://pypi.python.org/packages/6f/04/fbe561b6d45c0ec758330d5b7f5ba4b6cb4f1ca1ab49859d2fc16320da75/dulwich-0.12.0.tar.gz";
607 607 md5 = "f3a8a12bd9f9dd8c233e18f3d49436fa";
608 608 };
609 609 meta = {
610 610 license = [ pkgs.lib.licenses.gpl2Plus ];
611 611 };
612 612 };
613 613 ecdsa = super.buildPythonPackage {
614 614 name = "ecdsa-0.11";
615 615 buildInputs = with self; [];
616 616 doCheck = false;
617 617 propagatedBuildInputs = with self; [];
618 618 src = fetchurl {
619 619 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
620 620 md5 = "8ef586fe4dbb156697d756900cb41d7c";
621 621 };
622 622 meta = {
623 623 license = [ pkgs.lib.licenses.mit ];
624 624 };
625 625 };
626 626 elasticsearch = super.buildPythonPackage {
627 627 name = "elasticsearch-2.3.0";
628 628 buildInputs = with self; [];
629 629 doCheck = false;
630 630 propagatedBuildInputs = with self; [urllib3];
631 631 src = fetchurl {
632 632 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
633 633 md5 = "2550f3b51629cf1ef9636608af92c340";
634 634 };
635 635 meta = {
636 636 license = [ pkgs.lib.licenses.asl20 ];
637 637 };
638 638 };
639 639 elasticsearch-dsl = super.buildPythonPackage {
640 640 name = "elasticsearch-dsl-2.0.0";
641 641 buildInputs = with self; [];
642 642 doCheck = false;
643 643 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
644 644 src = fetchurl {
645 645 url = "https://pypi.python.org/packages/4e/5d/e788ae8dbe2ff4d13426db0a027533386a5c276c77a2654dc0e2007ce04a/elasticsearch-dsl-2.0.0.tar.gz";
646 646 md5 = "4cdfec81bb35383dd3b7d02d7dc5ee68";
647 647 };
648 648 meta = {
649 649 license = [ pkgs.lib.licenses.asl20 ];
650 650 };
651 651 };
652 652 flake8 = super.buildPythonPackage {
653 653 name = "flake8-2.4.1";
654 654 buildInputs = with self; [];
655 655 doCheck = false;
656 656 propagatedBuildInputs = with self; [pyflakes pep8 mccabe];
657 657 src = fetchurl {
658 658 url = "https://pypi.python.org/packages/8f/b5/9a73c66c7dba273bac8758398f060c008a25f3e84531063b42503b5d0a95/flake8-2.4.1.tar.gz";
659 659 md5 = "ed45d3db81a3b7c88bd63c6e37ca1d65";
660 660 };
661 661 meta = {
662 662 license = [ pkgs.lib.licenses.mit ];
663 663 };
664 664 };
665 665 future = super.buildPythonPackage {
666 666 name = "future-0.14.3";
667 667 buildInputs = with self; [];
668 668 doCheck = false;
669 669 propagatedBuildInputs = with self; [];
670 670 src = fetchurl {
671 671 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
672 672 md5 = "e94079b0bd1fc054929e8769fc0f6083";
673 673 };
674 674 meta = {
675 675 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
676 676 };
677 677 };
678 678 futures = super.buildPythonPackage {
679 679 name = "futures-3.0.2";
680 680 buildInputs = with self; [];
681 681 doCheck = false;
682 682 propagatedBuildInputs = with self; [];
683 683 src = fetchurl {
684 684 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
685 685 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
686 686 };
687 687 meta = {
688 688 license = [ pkgs.lib.licenses.bsdOriginal ];
689 689 };
690 690 };
691 691 gnureadline = super.buildPythonPackage {
692 692 name = "gnureadline-6.3.3";
693 693 buildInputs = with self; [];
694 694 doCheck = false;
695 695 propagatedBuildInputs = with self; [];
696 696 src = fetchurl {
697 697 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
698 698 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
699 699 };
700 700 meta = {
701 701 license = [ pkgs.lib.licenses.gpl1 ];
702 702 };
703 703 };
704 704 gprof2dot = super.buildPythonPackage {
705 705 name = "gprof2dot-2015.12.1";
706 706 buildInputs = with self; [];
707 707 doCheck = false;
708 708 propagatedBuildInputs = with self; [];
709 709 src = fetchurl {
710 710 url = "https://pypi.python.org/packages/b9/34/7bf93c1952d40fa5c95ad963f4d8344b61ef58558632402eca18e6c14127/gprof2dot-2015.12.1.tar.gz";
711 711 md5 = "e23bf4e2f94db032750c193384b4165b";
712 712 };
713 713 meta = {
714 714 license = [ { fullName = "LGPL"; } ];
715 715 };
716 716 };
717 717 gunicorn = super.buildPythonPackage {
718 718 name = "gunicorn-19.6.0";
719 719 buildInputs = with self; [];
720 720 doCheck = false;
721 721 propagatedBuildInputs = with self; [];
722 722 src = fetchurl {
723 723 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
724 724 md5 = "338e5e8a83ea0f0625f768dba4597530";
725 725 };
726 726 meta = {
727 727 license = [ pkgs.lib.licenses.mit ];
728 728 };
729 729 };
730 730 infrae.cache = super.buildPythonPackage {
731 731 name = "infrae.cache-1.0.1";
732 732 buildInputs = with self; [];
733 733 doCheck = false;
734 734 propagatedBuildInputs = with self; [Beaker repoze.lru];
735 735 src = fetchurl {
736 736 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
737 737 md5 = "b09076a766747e6ed2a755cc62088e32";
738 738 };
739 739 meta = {
740 740 license = [ pkgs.lib.licenses.zpt21 ];
741 741 };
742 742 };
743 743 invoke = super.buildPythonPackage {
744 744 name = "invoke-0.13.0";
745 745 buildInputs = with self; [];
746 746 doCheck = false;
747 747 propagatedBuildInputs = with self; [];
748 748 src = fetchurl {
749 749 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
750 750 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
751 751 };
752 752 meta = {
753 753 license = [ pkgs.lib.licenses.bsdOriginal ];
754 754 };
755 755 };
756 756 ipdb = super.buildPythonPackage {
757 757 name = "ipdb-0.8";
758 758 buildInputs = with self; [];
759 759 doCheck = false;
760 760 propagatedBuildInputs = with self; [ipython];
761 761 src = fetchurl {
762 762 url = "https://pypi.python.org/packages/f0/25/d7dd430ced6cd8dc242a933c8682b5dbf32eb4011d82f87e34209e5ec845/ipdb-0.8.zip";
763 763 md5 = "96dca0712efa01aa5eaf6b22071dd3ed";
764 764 };
765 765 meta = {
766 766 license = [ pkgs.lib.licenses.gpl1 ];
767 767 };
768 768 };
769 769 ipython = super.buildPythonPackage {
770 770 name = "ipython-3.1.0";
771 771 buildInputs = with self; [];
772 772 doCheck = false;
773 773 propagatedBuildInputs = with self; [];
774 774 src = fetchurl {
775 775 url = "https://pypi.python.org/packages/06/91/120c0835254c120af89f066afaabf81289bc2726c1fc3ca0555df6882f58/ipython-3.1.0.tar.gz";
776 776 md5 = "a749d90c16068687b0ec45a27e72ef8f";
777 777 };
778 778 meta = {
779 779 license = [ pkgs.lib.licenses.bsdOriginal ];
780 780 };
781 781 };
782 782 iso8601 = super.buildPythonPackage {
783 783 name = "iso8601-0.1.11";
784 784 buildInputs = with self; [];
785 785 doCheck = false;
786 786 propagatedBuildInputs = with self; [];
787 787 src = fetchurl {
788 788 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
789 789 md5 = "b06d11cd14a64096f907086044f0fe38";
790 790 };
791 791 meta = {
792 792 license = [ pkgs.lib.licenses.mit ];
793 793 };
794 794 };
795 795 itsdangerous = super.buildPythonPackage {
796 796 name = "itsdangerous-0.24";
797 797 buildInputs = with self; [];
798 798 doCheck = false;
799 799 propagatedBuildInputs = with self; [];
800 800 src = fetchurl {
801 801 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
802 802 md5 = "a3d55aa79369aef5345c036a8a26307f";
803 803 };
804 804 meta = {
805 805 license = [ pkgs.lib.licenses.bsdOriginal ];
806 806 };
807 807 };
808 808 kombu = super.buildPythonPackage {
809 809 name = "kombu-1.5.1";
810 810 buildInputs = with self; [];
811 811 doCheck = false;
812 812 propagatedBuildInputs = with self; [anyjson amqplib];
813 813 src = fetchurl {
814 814 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
815 815 md5 = "50662f3c7e9395b3d0721fb75d100b63";
816 816 };
817 817 meta = {
818 818 license = [ pkgs.lib.licenses.bsdOriginal ];
819 819 };
820 820 };
821 821 lxml = super.buildPythonPackage {
822 822 name = "lxml-3.4.4";
823 823 buildInputs = with self; [];
824 824 doCheck = false;
825 825 propagatedBuildInputs = with self; [];
826 826 src = fetchurl {
827 827 url = "https://pypi.python.org/packages/63/c7/4f2a2a4ad6c6fa99b14be6b3c1cece9142e2d915aa7c43c908677afc8fa4/lxml-3.4.4.tar.gz";
828 828 md5 = "a9a65972afc173ec7a39c585f4eea69c";
829 829 };
830 830 meta = {
831 831 license = [ pkgs.lib.licenses.bsdOriginal ];
832 832 };
833 833 };
834 marshmallow = super.buildPythonPackage {
835 name = "marshmallow-2.8.0";
836 buildInputs = with self; [];
837 doCheck = false;
838 propagatedBuildInputs = with self; [];
839 src = fetchurl {
840 url = "https://pypi.python.org/packages/4f/64/9393d77847d86981c84b88bbea627d30ff71b5ab1402636b366f73737817/marshmallow-2.8.0.tar.gz";
841 md5 = "204513fc123a3d9bdd7b63b9747f02e6";
842 };
843 meta = {
844 license = [ pkgs.lib.licenses.mit ];
845 };
846 };
834 847 mccabe = super.buildPythonPackage {
835 848 name = "mccabe-0.3";
836 849 buildInputs = with self; [];
837 850 doCheck = false;
838 851 propagatedBuildInputs = with self; [];
839 852 src = fetchurl {
840 853 url = "https://pypi.python.org/packages/c9/2e/75231479e11a906b64ac43bad9d0bb534d00080b18bdca8db9da46e1faf7/mccabe-0.3.tar.gz";
841 854 md5 = "81640948ff226f8c12b3277059489157";
842 855 };
843 856 meta = {
844 857 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
845 858 };
846 859 };
847 860 meld3 = super.buildPythonPackage {
848 861 name = "meld3-1.0.2";
849 862 buildInputs = with self; [];
850 863 doCheck = false;
851 864 propagatedBuildInputs = with self; [];
852 865 src = fetchurl {
853 866 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
854 867 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
855 868 };
856 869 meta = {
857 870 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
858 871 };
859 872 };
860 873 mock = super.buildPythonPackage {
861 874 name = "mock-1.0.1";
862 875 buildInputs = with self; [];
863 876 doCheck = false;
864 877 propagatedBuildInputs = with self; [];
865 878 src = fetchurl {
866 879 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
867 880 md5 = "869f08d003c289a97c1a6610faf5e913";
868 881 };
869 882 meta = {
870 883 license = [ pkgs.lib.licenses.bsdOriginal ];
871 884 };
872 885 };
873 886 msgpack-python = super.buildPythonPackage {
874 887 name = "msgpack-python-0.4.6";
875 888 buildInputs = with self; [];
876 889 doCheck = false;
877 890 propagatedBuildInputs = with self; [];
878 891 src = fetchurl {
879 892 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
880 893 md5 = "8b317669314cf1bc881716cccdaccb30";
881 894 };
882 895 meta = {
883 896 license = [ pkgs.lib.licenses.asl20 ];
884 897 };
885 898 };
886 899 nose = super.buildPythonPackage {
887 900 name = "nose-1.3.6";
888 901 buildInputs = with self; [];
889 902 doCheck = false;
890 903 propagatedBuildInputs = with self; [];
891 904 src = fetchurl {
892 905 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
893 906 md5 = "0ca546d81ca8309080fc80cb389e7a16";
894 907 };
895 908 meta = {
896 909 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
897 910 };
898 911 };
899 912 objgraph = super.buildPythonPackage {
900 913 name = "objgraph-2.0.0";
901 914 buildInputs = with self; [];
902 915 doCheck = false;
903 916 propagatedBuildInputs = with self; [];
904 917 src = fetchurl {
905 918 url = "https://pypi.python.org/packages/d7/33/ace750b59247496ed769b170586c5def7202683f3d98e737b75b767ff29e/objgraph-2.0.0.tar.gz";
906 919 md5 = "25b0d5e5adc74aa63ead15699614159c";
907 920 };
908 921 meta = {
909 922 license = [ pkgs.lib.licenses.mit ];
910 923 };
911 924 };
912 925 packaging = super.buildPythonPackage {
913 926 name = "packaging-15.2";
914 927 buildInputs = with self; [];
915 928 doCheck = false;
916 929 propagatedBuildInputs = with self; [];
917 930 src = fetchurl {
918 931 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
919 932 md5 = "c16093476f6ced42128bf610e5db3784";
920 933 };
921 934 meta = {
922 935 license = [ pkgs.lib.licenses.asl20 ];
923 936 };
924 937 };
925 938 paramiko = super.buildPythonPackage {
926 939 name = "paramiko-1.15.1";
927 940 buildInputs = with self; [];
928 941 doCheck = false;
929 942 propagatedBuildInputs = with self; [pycrypto ecdsa];
930 943 src = fetchurl {
931 944 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
932 945 md5 = "48c274c3f9b1282932567b21f6acf3b5";
933 946 };
934 947 meta = {
935 948 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
936 949 };
937 950 };
938 951 pep8 = super.buildPythonPackage {
939 952 name = "pep8-1.5.7";
940 953 buildInputs = with self; [];
941 954 doCheck = false;
942 955 propagatedBuildInputs = with self; [];
943 956 src = fetchurl {
944 957 url = "https://pypi.python.org/packages/8b/de/259f5e735897ada1683489dd514b2a1c91aaa74e5e6b68f80acf128a6368/pep8-1.5.7.tar.gz";
945 958 md5 = "f6adbdd69365ecca20513c709f9b7c93";
946 959 };
947 960 meta = {
948 961 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
949 962 };
950 963 };
951 964 psutil = super.buildPythonPackage {
952 965 name = "psutil-2.2.1";
953 966 buildInputs = with self; [];
954 967 doCheck = false;
955 968 propagatedBuildInputs = with self; [];
956 969 src = fetchurl {
957 970 url = "https://pypi.python.org/packages/df/47/ee54ef14dd40f8ce831a7581001a5096494dc99fe71586260ca6b531fe86/psutil-2.2.1.tar.gz";
958 971 md5 = "1a2b58cd9e3a53528bb6148f0c4d5244";
959 972 };
960 973 meta = {
961 974 license = [ pkgs.lib.licenses.bsdOriginal ];
962 975 };
963 976 };
964 977 psycopg2 = super.buildPythonPackage {
965 978 name = "psycopg2-2.6.1";
966 979 buildInputs = with self; [];
967 980 doCheck = false;
968 981 propagatedBuildInputs = with self; [];
969 982 src = fetchurl {
970 983 url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz";
971 984 md5 = "842b44f8c95517ed5b792081a2370da1";
972 985 };
973 986 meta = {
974 987 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
975 988 };
976 989 };
977 990 py = super.buildPythonPackage {
978 991 name = "py-1.4.29";
979 992 buildInputs = with self; [];
980 993 doCheck = false;
981 994 propagatedBuildInputs = with self; [];
982 995 src = fetchurl {
983 996 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
984 997 md5 = "c28e0accba523a29b35a48bb703fb96c";
985 998 };
986 999 meta = {
987 1000 license = [ pkgs.lib.licenses.mit ];
988 1001 };
989 1002 };
990 1003 py-bcrypt = super.buildPythonPackage {
991 1004 name = "py-bcrypt-0.4";
992 1005 buildInputs = with self; [];
993 1006 doCheck = false;
994 1007 propagatedBuildInputs = with self; [];
995 1008 src = fetchurl {
996 1009 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
997 1010 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
998 1011 };
999 1012 meta = {
1000 1013 license = [ pkgs.lib.licenses.bsdOriginal ];
1001 1014 };
1002 1015 };
1003 1016 py-gfm = super.buildPythonPackage {
1004 1017 name = "py-gfm-0.1.3";
1005 1018 buildInputs = with self; [];
1006 1019 doCheck = false;
1007 1020 propagatedBuildInputs = with self; [setuptools Markdown];
1008 1021 src = fetchurl {
1009 1022 url = "https://pypi.python.org/packages/12/e4/6b3d8678da04f97d7490d8264d8de51c2dc9fb91209ccee9c515c95e14c5/py-gfm-0.1.3.tar.gz";
1010 1023 md5 = "e588d9e69640a241b97e2c59c22527a6";
1011 1024 };
1012 1025 meta = {
1013 1026 license = [ pkgs.lib.licenses.bsdOriginal ];
1014 1027 };
1015 1028 };
1016 1029 pycrypto = super.buildPythonPackage {
1017 1030 name = "pycrypto-2.6.1";
1018 1031 buildInputs = with self; [];
1019 1032 doCheck = false;
1020 1033 propagatedBuildInputs = with self; [];
1021 1034 src = fetchurl {
1022 1035 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1023 1036 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1024 1037 };
1025 1038 meta = {
1026 1039 license = [ pkgs.lib.licenses.publicDomain ];
1027 1040 };
1028 1041 };
1029 1042 pycurl = super.buildPythonPackage {
1030 1043 name = "pycurl-7.19.5";
1031 1044 buildInputs = with self; [];
1032 1045 doCheck = false;
1033 1046 propagatedBuildInputs = with self; [];
1034 1047 src = fetchurl {
1035 1048 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1036 1049 md5 = "47b4eac84118e2606658122104e62072";
1037 1050 };
1038 1051 meta = {
1039 1052 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1040 1053 };
1041 1054 };
1042 1055 pyflakes = super.buildPythonPackage {
1043 1056 name = "pyflakes-0.8.1";
1044 1057 buildInputs = with self; [];
1045 1058 doCheck = false;
1046 1059 propagatedBuildInputs = with self; [];
1047 1060 src = fetchurl {
1048 1061 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1049 1062 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1050 1063 };
1051 1064 meta = {
1052 1065 license = [ pkgs.lib.licenses.mit ];
1053 1066 };
1054 1067 };
1055 1068 pyparsing = super.buildPythonPackage {
1056 1069 name = "pyparsing-1.5.7";
1057 1070 buildInputs = with self; [];
1058 1071 doCheck = false;
1059 1072 propagatedBuildInputs = with self; [];
1060 1073 src = fetchurl {
1061 1074 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1062 1075 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1063 1076 };
1064 1077 meta = {
1065 1078 license = [ pkgs.lib.licenses.mit ];
1066 1079 };
1067 1080 };
1068 1081 pyramid = super.buildPythonPackage {
1069 1082 name = "pyramid-1.6.1";
1070 1083 buildInputs = with self; [];
1071 1084 doCheck = false;
1072 1085 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
1073 1086 src = fetchurl {
1074 1087 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
1075 1088 md5 = "b18688ff3cc33efdbb098a35b45dd122";
1076 1089 };
1077 1090 meta = {
1078 1091 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1079 1092 };
1080 1093 };
1081 1094 pyramid-beaker = super.buildPythonPackage {
1082 1095 name = "pyramid-beaker-0.8";
1083 1096 buildInputs = with self; [];
1084 1097 doCheck = false;
1085 1098 propagatedBuildInputs = with self; [pyramid Beaker];
1086 1099 src = fetchurl {
1087 1100 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1088 1101 md5 = "22f14be31b06549f80890e2c63a93834";
1089 1102 };
1090 1103 meta = {
1091 1104 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1092 1105 };
1093 1106 };
1094 1107 pyramid-debugtoolbar = super.buildPythonPackage {
1095 1108 name = "pyramid-debugtoolbar-2.4.2";
1096 1109 buildInputs = with self; [];
1097 1110 doCheck = false;
1098 1111 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1099 1112 src = fetchurl {
1100 1113 url = "https://pypi.python.org/packages/89/00/ed5426ee41ed747ba3ffd30e8230841a6878286ea67d480b1444d24f06a2/pyramid_debugtoolbar-2.4.2.tar.gz";
1101 1114 md5 = "073ea67086cc4bd5decc3a000853642d";
1102 1115 };
1103 1116 meta = {
1104 1117 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1105 1118 };
1106 1119 };
1107 1120 pyramid-jinja2 = super.buildPythonPackage {
1108 1121 name = "pyramid-jinja2-2.5";
1109 1122 buildInputs = with self; [];
1110 1123 doCheck = false;
1111 1124 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1112 1125 src = fetchurl {
1113 1126 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1114 1127 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1115 1128 };
1116 1129 meta = {
1117 1130 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1118 1131 };
1119 1132 };
1120 1133 pyramid-mako = super.buildPythonPackage {
1121 1134 name = "pyramid-mako-1.0.2";
1122 1135 buildInputs = with self; [];
1123 1136 doCheck = false;
1124 1137 propagatedBuildInputs = with self; [pyramid Mako];
1125 1138 src = fetchurl {
1126 1139 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1127 1140 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1128 1141 };
1129 1142 meta = {
1130 1143 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1131 1144 };
1132 1145 };
1133 1146 pysqlite = super.buildPythonPackage {
1134 1147 name = "pysqlite-2.6.3";
1135 1148 buildInputs = with self; [];
1136 1149 doCheck = false;
1137 1150 propagatedBuildInputs = with self; [];
1138 1151 src = fetchurl {
1139 1152 url = "https://pypi.python.org/packages/5c/a6/1c429cd4c8069cf4bfbd0eb4d592b3f4042155a8202df83d7e9b93aa3dc2/pysqlite-2.6.3.tar.gz";
1140 1153 md5 = "7ff1cedee74646b50117acff87aa1cfa";
1141 1154 };
1142 1155 meta = {
1143 1156 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1144 1157 };
1145 1158 };
1146 1159 pytest = super.buildPythonPackage {
1147 1160 name = "pytest-2.8.5";
1148 1161 buildInputs = with self; [];
1149 1162 doCheck = false;
1150 1163 propagatedBuildInputs = with self; [py];
1151 1164 src = fetchurl {
1152 1165 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
1153 1166 md5 = "8493b06f700862f1294298d6c1b715a9";
1154 1167 };
1155 1168 meta = {
1156 1169 license = [ pkgs.lib.licenses.mit ];
1157 1170 };
1158 1171 };
1159 1172 pytest-catchlog = super.buildPythonPackage {
1160 1173 name = "pytest-catchlog-1.2.2";
1161 1174 buildInputs = with self; [];
1162 1175 doCheck = false;
1163 1176 propagatedBuildInputs = with self; [py pytest];
1164 1177 src = fetchurl {
1165 1178 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1166 1179 md5 = "09d890c54c7456c818102b7ff8c182c8";
1167 1180 };
1168 1181 meta = {
1169 1182 license = [ pkgs.lib.licenses.mit ];
1170 1183 };
1171 1184 };
1172 1185 pytest-cov = super.buildPythonPackage {
1173 1186 name = "pytest-cov-1.8.1";
1174 1187 buildInputs = with self; [];
1175 1188 doCheck = false;
1176 1189 propagatedBuildInputs = with self; [py pytest coverage cov-core];
1177 1190 src = fetchurl {
1178 1191 url = "https://pypi.python.org/packages/11/4b/b04646e97f1721878eb21e9f779102d84dd044d324382263b1770a3e4838/pytest-cov-1.8.1.tar.gz";
1179 1192 md5 = "76c778afa2494088270348be42d759fc";
1180 1193 };
1181 1194 meta = {
1182 1195 license = [ pkgs.lib.licenses.mit ];
1183 1196 };
1184 1197 };
1185 1198 pytest-profiling = super.buildPythonPackage {
1186 1199 name = "pytest-profiling-1.0.1";
1187 1200 buildInputs = with self; [];
1188 1201 doCheck = false;
1189 1202 propagatedBuildInputs = with self; [six pytest gprof2dot];
1190 1203 src = fetchurl {
1191 1204 url = "https://pypi.python.org/packages/d8/67/8ffab73406e22870e07fa4dc8dce1d7689b26dba8efd00161c9b6fc01ec0/pytest-profiling-1.0.1.tar.gz";
1192 1205 md5 = "354404eb5b3fd4dc5eb7fffbb3d9b68b";
1193 1206 };
1194 1207 meta = {
1195 1208 license = [ pkgs.lib.licenses.mit ];
1196 1209 };
1197 1210 };
1198 1211 pytest-runner = super.buildPythonPackage {
1199 1212 name = "pytest-runner-2.7.1";
1200 1213 buildInputs = with self; [];
1201 1214 doCheck = false;
1202 1215 propagatedBuildInputs = with self; [];
1203 1216 src = fetchurl {
1204 1217 url = "https://pypi.python.org/packages/99/6b/c4ff4418d3424d4475b7af60724fd4a5cdd91ed8e489dc9443281f0052bc/pytest-runner-2.7.1.tar.gz";
1205 1218 md5 = "e56f0bc8d79a6bd91772b44ef4215c7e";
1206 1219 };
1207 1220 meta = {
1208 1221 license = [ pkgs.lib.licenses.mit ];
1209 1222 };
1210 1223 };
1211 1224 pytest-timeout = super.buildPythonPackage {
1212 1225 name = "pytest-timeout-0.4";
1213 1226 buildInputs = with self; [];
1214 1227 doCheck = false;
1215 1228 propagatedBuildInputs = with self; [pytest];
1216 1229 src = fetchurl {
1217 1230 url = "https://pypi.python.org/packages/24/48/5f6bd4b8026a26e1dd427243d560a29a0f1b24a5c7cffca4bf049a7bb65b/pytest-timeout-0.4.tar.gz";
1218 1231 md5 = "03b28aff69cbbfb959ed35ade5fde262";
1219 1232 };
1220 1233 meta = {
1221 1234 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1222 1235 };
1223 1236 };
1224 1237 python-dateutil = super.buildPythonPackage {
1225 1238 name = "python-dateutil-1.5";
1226 1239 buildInputs = with self; [];
1227 1240 doCheck = false;
1228 1241 propagatedBuildInputs = with self; [];
1229 1242 src = fetchurl {
1230 1243 url = "https://pypi.python.org/packages/b4/7c/df59c89a753eb33c7c44e1dd42de0e9bc2ccdd5a4d576e0bfad97cc280cb/python-dateutil-1.5.tar.gz";
1231 1244 md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5";
1232 1245 };
1233 1246 meta = {
1234 1247 license = [ pkgs.lib.licenses.psfl ];
1235 1248 };
1236 1249 };
1237 1250 python-editor = super.buildPythonPackage {
1238 1251 name = "python-editor-1.0.1";
1239 1252 buildInputs = with self; [];
1240 1253 doCheck = false;
1241 1254 propagatedBuildInputs = with self; [];
1242 1255 src = fetchurl {
1243 1256 url = "https://pypi.python.org/packages/2b/c0/df7b87d5cf016f82eab3b05cd35f53287c1178ad8c42bfb6fa61b89b22f6/python-editor-1.0.1.tar.gz";
1244 1257 md5 = "e1fa63535b40e022fa4fd646fd8b511a";
1245 1258 };
1246 1259 meta = {
1247 1260 license = [ pkgs.lib.licenses.asl20 ];
1248 1261 };
1249 1262 };
1250 1263 python-ldap = super.buildPythonPackage {
1251 1264 name = "python-ldap-2.4.19";
1252 1265 buildInputs = with self; [];
1253 1266 doCheck = false;
1254 1267 propagatedBuildInputs = with self; [setuptools];
1255 1268 src = fetchurl {
1256 1269 url = "https://pypi.python.org/packages/42/81/1b64838c82e64f14d4e246ff00b52e650a35c012551b891ada2b85d40737/python-ldap-2.4.19.tar.gz";
1257 1270 md5 = "b941bf31d09739492aa19ef679e94ae3";
1258 1271 };
1259 1272 meta = {
1260 1273 license = [ pkgs.lib.licenses.psfl ];
1261 1274 };
1262 1275 };
1263 1276 python-memcached = super.buildPythonPackage {
1264 1277 name = "python-memcached-1.57";
1265 1278 buildInputs = with self; [];
1266 1279 doCheck = false;
1267 1280 propagatedBuildInputs = with self; [six];
1268 1281 src = fetchurl {
1269 1282 url = "https://pypi.python.org/packages/52/9d/eebc0dcbc5c7c66840ad207dfc1baa376dadb74912484bff73819cce01e6/python-memcached-1.57.tar.gz";
1270 1283 md5 = "de21f64b42b2d961f3d4ad7beb5468a1";
1271 1284 };
1272 1285 meta = {
1273 1286 license = [ pkgs.lib.licenses.psfl ];
1274 1287 };
1275 1288 };
1276 1289 python-pam = super.buildPythonPackage {
1277 1290 name = "python-pam-1.8.2";
1278 1291 buildInputs = with self; [];
1279 1292 doCheck = false;
1280 1293 propagatedBuildInputs = with self; [];
1281 1294 src = fetchurl {
1282 1295 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1283 1296 md5 = "db71b6b999246fb05d78ecfbe166629d";
1284 1297 };
1285 1298 meta = {
1286 1299 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1287 1300 };
1288 1301 };
1289 1302 pytz = super.buildPythonPackage {
1290 1303 name = "pytz-2015.4";
1291 1304 buildInputs = with self; [];
1292 1305 doCheck = false;
1293 1306 propagatedBuildInputs = with self; [];
1294 1307 src = fetchurl {
1295 1308 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1296 1309 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1297 1310 };
1298 1311 meta = {
1299 1312 license = [ pkgs.lib.licenses.mit ];
1300 1313 };
1301 1314 };
1302 1315 pyzmq = super.buildPythonPackage {
1303 1316 name = "pyzmq-14.6.0";
1304 1317 buildInputs = with self; [];
1305 1318 doCheck = false;
1306 1319 propagatedBuildInputs = with self; [];
1307 1320 src = fetchurl {
1308 1321 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1309 1322 md5 = "395b5de95a931afa5b14c9349a5b8024";
1310 1323 };
1311 1324 meta = {
1312 1325 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1313 1326 };
1314 1327 };
1315 1328 recaptcha-client = super.buildPythonPackage {
1316 1329 name = "recaptcha-client-1.0.6";
1317 1330 buildInputs = with self; [];
1318 1331 doCheck = false;
1319 1332 propagatedBuildInputs = with self; [];
1320 1333 src = fetchurl {
1321 1334 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1322 1335 md5 = "74228180f7e1fb76c4d7089160b0d919";
1323 1336 };
1324 1337 meta = {
1325 1338 license = [ { fullName = "MIT/X11"; } ];
1326 1339 };
1327 1340 };
1328 1341 repoze.lru = super.buildPythonPackage {
1329 1342 name = "repoze.lru-0.6";
1330 1343 buildInputs = with self; [];
1331 1344 doCheck = false;
1332 1345 propagatedBuildInputs = with self; [];
1333 1346 src = fetchurl {
1334 1347 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1335 1348 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1336 1349 };
1337 1350 meta = {
1338 1351 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1339 1352 };
1340 1353 };
1341 1354 requests = super.buildPythonPackage {
1342 1355 name = "requests-2.9.1";
1343 1356 buildInputs = with self; [];
1344 1357 doCheck = false;
1345 1358 propagatedBuildInputs = with self; [];
1346 1359 src = fetchurl {
1347 1360 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1348 1361 md5 = "0b7f480d19012ec52bab78292efd976d";
1349 1362 };
1350 1363 meta = {
1351 1364 license = [ pkgs.lib.licenses.asl20 ];
1352 1365 };
1353 1366 };
1354 1367 rhodecode-enterprise-ce = super.buildPythonPackage {
1355 1368 name = "rhodecode-enterprise-ce-4.3.0";
1356 1369 buildInputs = with self; [WebTest configobj cssselect flake8 lxml mock pytest pytest-cov pytest-runner];
1357 1370 doCheck = true;
1358 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1371 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu marshmallow msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1359 1372 src = ./.;
1360 1373 meta = {
1361 1374 license = [ { fullName = "AGPLv3, and Commercial License"; } ];
1362 1375 };
1363 1376 };
1364 1377 rhodecode-tools = super.buildPythonPackage {
1365 1378 name = "rhodecode-tools-0.8.3";
1366 1379 buildInputs = with self; [];
1367 1380 doCheck = false;
1368 1381 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests Whoosh elasticsearch elasticsearch-dsl];
1369 1382 src = fetchurl {
1370 1383 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip";
1371 1384 md5 = "9acdfd71b8ddf4056057065f37ab9ccb";
1372 1385 };
1373 1386 meta = {
1374 1387 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1375 1388 };
1376 1389 };
1377 1390 serpent = super.buildPythonPackage {
1378 1391 name = "serpent-1.12";
1379 1392 buildInputs = with self; [];
1380 1393 doCheck = false;
1381 1394 propagatedBuildInputs = with self; [];
1382 1395 src = fetchurl {
1383 1396 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
1384 1397 md5 = "05869ac7b062828b34f8f927f0457b65";
1385 1398 };
1386 1399 meta = {
1387 1400 license = [ pkgs.lib.licenses.mit ];
1388 1401 };
1389 1402 };
1390 1403 setproctitle = super.buildPythonPackage {
1391 1404 name = "setproctitle-1.1.8";
1392 1405 buildInputs = with self; [];
1393 1406 doCheck = false;
1394 1407 propagatedBuildInputs = with self; [];
1395 1408 src = fetchurl {
1396 1409 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1397 1410 md5 = "728f4c8c6031bbe56083a48594027edd";
1398 1411 };
1399 1412 meta = {
1400 1413 license = [ pkgs.lib.licenses.bsdOriginal ];
1401 1414 };
1402 1415 };
1403 1416 setuptools = super.buildPythonPackage {
1404 1417 name = "setuptools-20.8.1";
1405 1418 buildInputs = with self; [];
1406 1419 doCheck = false;
1407 1420 propagatedBuildInputs = with self; [];
1408 1421 src = fetchurl {
1409 1422 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
1410 1423 md5 = "fe58a5cac0df20bb83942b252a4b0543";
1411 1424 };
1412 1425 meta = {
1413 1426 license = [ pkgs.lib.licenses.mit ];
1414 1427 };
1415 1428 };
1416 1429 setuptools-scm = super.buildPythonPackage {
1417 1430 name = "setuptools-scm-1.11.0";
1418 1431 buildInputs = with self; [];
1419 1432 doCheck = false;
1420 1433 propagatedBuildInputs = with self; [];
1421 1434 src = fetchurl {
1422 1435 url = "https://pypi.python.org/packages/cd/5f/e3a038292358058d83d764a47d09114aa5a8003ed4529518f9e580f1a94f/setuptools_scm-1.11.0.tar.gz";
1423 1436 md5 = "4c5c896ba52e134bbc3507bac6400087";
1424 1437 };
1425 1438 meta = {
1426 1439 license = [ pkgs.lib.licenses.mit ];
1427 1440 };
1428 1441 };
1429 1442 simplejson = super.buildPythonPackage {
1430 1443 name = "simplejson-3.7.2";
1431 1444 buildInputs = with self; [];
1432 1445 doCheck = false;
1433 1446 propagatedBuildInputs = with self; [];
1434 1447 src = fetchurl {
1435 1448 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
1436 1449 md5 = "a5fc7d05d4cb38492285553def5d4b46";
1437 1450 };
1438 1451 meta = {
1439 1452 license = [ pkgs.lib.licenses.mit pkgs.lib.licenses.afl21 ];
1440 1453 };
1441 1454 };
1442 1455 six = super.buildPythonPackage {
1443 1456 name = "six-1.9.0";
1444 1457 buildInputs = with self; [];
1445 1458 doCheck = false;
1446 1459 propagatedBuildInputs = with self; [];
1447 1460 src = fetchurl {
1448 1461 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1449 1462 md5 = "476881ef4012262dfc8adc645ee786c4";
1450 1463 };
1451 1464 meta = {
1452 1465 license = [ pkgs.lib.licenses.mit ];
1453 1466 };
1454 1467 };
1455 1468 subprocess32 = super.buildPythonPackage {
1456 1469 name = "subprocess32-3.2.6";
1457 1470 buildInputs = with self; [];
1458 1471 doCheck = false;
1459 1472 propagatedBuildInputs = with self; [];
1460 1473 src = fetchurl {
1461 1474 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
1462 1475 md5 = "754c5ab9f533e764f931136974b618f1";
1463 1476 };
1464 1477 meta = {
1465 1478 license = [ pkgs.lib.licenses.psfl ];
1466 1479 };
1467 1480 };
1468 1481 supervisor = super.buildPythonPackage {
1469 1482 name = "supervisor-3.3.0";
1470 1483 buildInputs = with self; [];
1471 1484 doCheck = false;
1472 1485 propagatedBuildInputs = with self; [meld3];
1473 1486 src = fetchurl {
1474 1487 url = "https://pypi.python.org/packages/44/80/d28047d120bfcc8158b4e41127706731ee6a3419c661e0a858fb0e7c4b2d/supervisor-3.3.0.tar.gz";
1475 1488 md5 = "46bac00378d1eddb616752b990c67416";
1476 1489 };
1477 1490 meta = {
1478 1491 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1479 1492 };
1480 1493 };
1481 1494 transifex-client = super.buildPythonPackage {
1482 1495 name = "transifex-client-0.10";
1483 1496 buildInputs = with self; [];
1484 1497 doCheck = false;
1485 1498 propagatedBuildInputs = with self; [];
1486 1499 src = fetchurl {
1487 1500 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1488 1501 md5 = "5549538d84b8eede6b254cd81ae024fa";
1489 1502 };
1490 1503 meta = {
1491 1504 license = [ pkgs.lib.licenses.gpl2 ];
1492 1505 };
1493 1506 };
1494 1507 translationstring = super.buildPythonPackage {
1495 1508 name = "translationstring-1.3";
1496 1509 buildInputs = with self; [];
1497 1510 doCheck = false;
1498 1511 propagatedBuildInputs = with self; [];
1499 1512 src = fetchurl {
1500 1513 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1501 1514 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1502 1515 };
1503 1516 meta = {
1504 1517 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1505 1518 };
1506 1519 };
1507 1520 trollius = super.buildPythonPackage {
1508 1521 name = "trollius-1.0.4";
1509 1522 buildInputs = with self; [];
1510 1523 doCheck = false;
1511 1524 propagatedBuildInputs = with self; [futures];
1512 1525 src = fetchurl {
1513 1526 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1514 1527 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1515 1528 };
1516 1529 meta = {
1517 1530 license = [ pkgs.lib.licenses.asl20 ];
1518 1531 };
1519 1532 };
1520 1533 uWSGI = super.buildPythonPackage {
1521 1534 name = "uWSGI-2.0.11.2";
1522 1535 buildInputs = with self; [];
1523 1536 doCheck = false;
1524 1537 propagatedBuildInputs = with self; [];
1525 1538 src = fetchurl {
1526 1539 url = "https://pypi.python.org/packages/9b/78/918db0cfab0546afa580c1e565209c49aaf1476bbfe491314eadbe47c556/uwsgi-2.0.11.2.tar.gz";
1527 1540 md5 = "1f02dcbee7f6f61de4b1fd68350cf16f";
1528 1541 };
1529 1542 meta = {
1530 1543 license = [ pkgs.lib.licenses.gpl2 ];
1531 1544 };
1532 1545 };
1533 1546 urllib3 = super.buildPythonPackage {
1534 1547 name = "urllib3-1.16";
1535 1548 buildInputs = with self; [];
1536 1549 doCheck = false;
1537 1550 propagatedBuildInputs = with self; [];
1538 1551 src = fetchurl {
1539 1552 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1540 1553 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1541 1554 };
1542 1555 meta = {
1543 1556 license = [ pkgs.lib.licenses.mit ];
1544 1557 };
1545 1558 };
1546 1559 venusian = super.buildPythonPackage {
1547 1560 name = "venusian-1.0";
1548 1561 buildInputs = with self; [];
1549 1562 doCheck = false;
1550 1563 propagatedBuildInputs = with self; [];
1551 1564 src = fetchurl {
1552 1565 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
1553 1566 md5 = "dccf2eafb7113759d60c86faf5538756";
1554 1567 };
1555 1568 meta = {
1556 1569 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1557 1570 };
1558 1571 };
1559 1572 waitress = super.buildPythonPackage {
1560 1573 name = "waitress-0.8.9";
1561 1574 buildInputs = with self; [];
1562 1575 doCheck = false;
1563 1576 propagatedBuildInputs = with self; [setuptools];
1564 1577 src = fetchurl {
1565 1578 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
1566 1579 md5 = "da3f2e62b3676be5dd630703a68e2a04";
1567 1580 };
1568 1581 meta = {
1569 1582 license = [ pkgs.lib.licenses.zpt21 ];
1570 1583 };
1571 1584 };
1572 1585 wsgiref = super.buildPythonPackage {
1573 1586 name = "wsgiref-0.1.2";
1574 1587 buildInputs = with self; [];
1575 1588 doCheck = false;
1576 1589 propagatedBuildInputs = with self; [];
1577 1590 src = fetchurl {
1578 1591 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1579 1592 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1580 1593 };
1581 1594 meta = {
1582 1595 license = [ { fullName = "PSF or ZPL"; } ];
1583 1596 };
1584 1597 };
1585 1598 zope.cachedescriptors = super.buildPythonPackage {
1586 1599 name = "zope.cachedescriptors-4.0.0";
1587 1600 buildInputs = with self; [];
1588 1601 doCheck = false;
1589 1602 propagatedBuildInputs = with self; [setuptools];
1590 1603 src = fetchurl {
1591 1604 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1592 1605 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1593 1606 };
1594 1607 meta = {
1595 1608 license = [ pkgs.lib.licenses.zpt21 ];
1596 1609 };
1597 1610 };
1598 1611 zope.deprecation = super.buildPythonPackage {
1599 1612 name = "zope.deprecation-4.1.2";
1600 1613 buildInputs = with self; [];
1601 1614 doCheck = false;
1602 1615 propagatedBuildInputs = with self; [setuptools];
1603 1616 src = fetchurl {
1604 1617 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1605 1618 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1606 1619 };
1607 1620 meta = {
1608 1621 license = [ pkgs.lib.licenses.zpt21 ];
1609 1622 };
1610 1623 };
1611 1624 zope.event = super.buildPythonPackage {
1612 1625 name = "zope.event-4.0.3";
1613 1626 buildInputs = with self; [];
1614 1627 doCheck = false;
1615 1628 propagatedBuildInputs = with self; [setuptools];
1616 1629 src = fetchurl {
1617 1630 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
1618 1631 md5 = "9a3780916332b18b8b85f522bcc3e249";
1619 1632 };
1620 1633 meta = {
1621 1634 license = [ pkgs.lib.licenses.zpt21 ];
1622 1635 };
1623 1636 };
1624 1637 zope.interface = super.buildPythonPackage {
1625 1638 name = "zope.interface-4.1.3";
1626 1639 buildInputs = with self; [];
1627 1640 doCheck = false;
1628 1641 propagatedBuildInputs = with self; [setuptools];
1629 1642 src = fetchurl {
1630 1643 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
1631 1644 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
1632 1645 };
1633 1646 meta = {
1634 1647 license = [ pkgs.lib.licenses.zpt21 ];
1635 1648 };
1636 1649 };
1637 1650
1638 1651 ### Test requirements
1639 1652
1640
1653
1641 1654 }
@@ -1,151 +1,152 b''
1 1 Babel==1.3
2 2 Beaker==1.7.0
3 3 CProfileV==1.0.6
4 4 Fabric==1.10.0
5 5 FormEncode==1.2.4
6 6 Jinja2==2.7.3
7 7 Mako==1.0.1
8 8 Markdown==2.6.2
9 9 MarkupSafe==0.23
10 10 MySQL-python==1.2.5
11 11 Paste==2.0.2
12 12 PasteDeploy==1.5.2
13 13 PasteScript==1.7.5
14 14 Pygments==2.1.3
15 15
16 16 # TODO: This version is not available on PyPI
17 17 # Pylons==1.0.2.dev20160108
18 18 Pylons==1.0.1
19 19
20 20 # TODO: This version is not available, but newer ones are
21 21 # Pyro4==4.35
22 22 Pyro4==4.41
23 23
24 24 # TODO: This should probably not be in here
25 25 # -e hg+https://johbo@code.rhodecode.com/johbo/rhodecode-fork@3a454bd1f17c0b2b2a951cf2b111e0320d7942a9#egg=RhodeCodeEnterprise-dev
26 26
27 27 # TODO: This is not really a dependency, we should add it only
28 28 # into the development environment, since there it is useful.
29 29 # RhodeCodeVCSServer==3.9.0
30 30
31 31 Routes==1.13
32 32 SQLAlchemy==0.9.9
33 33 Sphinx==1.2.2
34 34 Tempita==0.5.2
35 35 URLObject==2.4.0
36 36 WebError==0.10.3
37 37
38 38 # TODO: This is modified by us, needs a better integration. For now
39 39 # using the latest version before.
40 40 # WebHelpers==1.3.dev20150807
41 41 WebHelpers==1.3
42 42
43 43 WebHelpers2==2.0
44 44 WebOb==1.3.1
45 45 WebTest==1.4.3
46 46 Whoosh==2.7.0
47 47 alembic==0.8.4
48 48 amqplib==1.0.2
49 49 anyjson==0.3.3
50 50 appenlight-client==0.6.14
51 51 authomatic==0.1.0.post1;
52 52 backport-ipaddress==0.1
53 53 bottle==0.12.8
54 54 bumpversion==0.5.3
55 55 celery==2.2.10
56 56 click==5.1
57 57 colander==1.2
58 58 configobj==5.0.6
59 59 cov-core==1.15.0
60 60 coverage==3.7.1
61 61 cssselect==0.9.1
62 62 decorator==3.4.2
63 63 docutils==0.12
64 64 dogpile.cache==0.6.1
65 65 dogpile.core==0.4.1
66 66 dulwich==0.12.0
67 67 ecdsa==0.11
68 68 flake8==2.4.1
69 69 future==0.14.3
70 70 futures==3.0.2
71 71 gprof2dot==2015.12.1
72 72 gunicorn==19.6.0
73 73
74 74 # TODO: Needs subvertpy and blows up without Subversion headers,
75 75 # actually we should not need this for Enterprise at all.
76 76 # hgsubversion==1.8.2
77 77
78 78 gnureadline==6.3.3
79 79 infrae.cache==1.0.1
80 80 invoke==0.13.0
81 81 ipdb==0.8
82 82 ipython==3.1.0
83 83 iso8601==0.1.11
84 84 itsdangerous==0.24
85 85 kombu==1.5.1
86 86 lxml==3.4.4
87 marshmallow==2.8.0
87 88 mccabe==0.3
88 89 meld3==1.0.2
89 90 mock==1.0.1
90 91 msgpack-python==0.4.6
91 92 nose==1.3.6
92 93 objgraph==2.0.0
93 94 packaging==15.2
94 95 paramiko==1.15.1
95 96 pep8==1.5.7
96 97 psutil==2.2.1
97 98 psycopg2==2.6.1
98 99 py==1.4.29
99 100 py-bcrypt==0.4
100 101 py-gfm==0.1.3
101 102 pycrypto==2.6.1
102 103 pycurl==7.19.5
103 104 pyflakes==0.8.1
104 105 pyparsing==1.5.7
105 106 pyramid==1.6.1
106 107 pyramid-beaker==0.8
107 108 pyramid-debugtoolbar==2.4.2
108 109 pyramid-jinja2==2.5
109 110 pyramid-mako==1.0.2
110 111 pysqlite==2.6.3
111 112 pytest==2.8.5
112 113 pytest-runner==2.7.1
113 114 pytest-catchlog==1.2.2
114 115 pytest-cov==1.8.1
115 116 pytest-profiling==1.0.1
116 117 pytest-timeout==0.4
117 118 python-dateutil==1.5
118 119 python-ldap==2.4.19
119 120 python-memcached==1.57
120 121 python-pam==1.8.2
121 122 pytz==2015.4
122 123 pyzmq==14.6.0
123 124
124 125 # TODO: This is not available in public
125 126 # rc-testdata==0.2.0
126 127
127 128 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip#md5=9acdfd71b8ddf4056057065f37ab9ccb
128 129
129 130
130 131 recaptcha-client==1.0.6
131 132 repoze.lru==0.6
132 133 requests==2.9.1
133 134 serpent==1.12
134 135 setproctitle==1.1.8
135 136 setuptools==20.8.1
136 137 setuptools-scm==1.11.0
137 138 simplejson==3.7.2
138 139 six==1.9.0
139 140 subprocess32==3.2.6
140 141 supervisor==3.3.0
141 142 transifex-client==0.10
142 143 translationstring==1.3
143 144 trollius==1.0.4
144 145 uWSGI==2.0.11.2
145 146 venusian==1.0
146 147 waitress==0.8.9
147 148 wsgiref==0.1.2
148 149 zope.cachedescriptors==4.0.0
149 150 zope.deprecation==4.1.2
150 151 zope.event==4.0.3
151 152 zope.interface==4.1.3
@@ -1,158 +1,163 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23
24 24 from rhodecode.model.repo import RepoModel
25 25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
26 26 from rhodecode.api.tests.utils import (
27 27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
28 28 from rhodecode.tests.fixture import Fixture
29 29
30 30
31 31 fixture = Fixture()
32 32
33 33 UPDATE_REPO_NAME = 'api_update_me'
34 34
35 35 class SAME_AS_UPDATES(object): """ Constant used for tests below """
36 36
37 37 @pytest.mark.usefixtures("testuser_api", "app")
38 38 class TestApiUpdateRepo(object):
39 39
40 40 @pytest.mark.parametrize("updates, expected", [
41 41 ({'owner': TEST_USER_REGULAR_LOGIN}, SAME_AS_UPDATES),
42 42 ({'description': 'new description'}, SAME_AS_UPDATES),
43 43 ({'clone_uri': 'http://foo.com/repo'}, SAME_AS_UPDATES),
44 44 ({'clone_uri': None}, {'clone_uri': ''}),
45 45 ({'clone_uri': ''}, {'clone_uri': ''}),
46 46 ({'landing_rev': 'branch:master'}, {'landing_rev': ['branch','master']}),
47 47 ({'enable_statistics': True}, SAME_AS_UPDATES),
48 48 ({'enable_locking': True}, SAME_AS_UPDATES),
49 49 ({'enable_downloads': True}, SAME_AS_UPDATES),
50 ({'name': 'new_repo_name'}, {'repo_name': 'new_repo_name'}),
51 ({'group': 'test_group_for_update'},
52 {'repo_name': 'test_group_for_update/%s' % UPDATE_REPO_NAME}),
50 ({'name': 'new_repo_name'}, {
51 'repo_name': 'new_repo_name',
52 'url': 'http://test.example.com:80/new_repo_name',
53 }),
54 ({'group': 'test_group_for_update'}, {
55 'repo_name': 'test_group_for_update/%s' % UPDATE_REPO_NAME,
56 'url': 'http://test.example.com:80/test_group_for_update/%s' % UPDATE_REPO_NAME
57 }),
53 58 ])
54 59 def test_api_update_repo(self, updates, expected, backend):
55 60 repo_name = UPDATE_REPO_NAME
56 61 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
57 62 if updates.get('group'):
58 63 fixture.create_repo_group(updates['group'])
59 64
60 65 expected_api_data = repo.get_api_data(include_secrets=True)
61 66 if expected is SAME_AS_UPDATES:
62 67 expected_api_data.update(updates)
63 68 else:
64 69 expected_api_data.update(expected)
65 70
66 71
67 72 id_, params = build_data(
68 73 self.apikey, 'update_repo', repoid=repo_name, **updates)
69 74 response = api_call(self.app, params)
70 75
71 76 if updates.get('name'):
72 77 repo_name = updates['name']
73 78 if updates.get('group'):
74 79 repo_name = '/'.join([updates['group'], repo_name])
75 80
76 81 try:
77 82 expected = {
78 83 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
79 84 'repository': jsonify(expected_api_data)
80 85 }
81 86 assert_ok(id_, expected, given=response.body)
82 87 finally:
83 88 fixture.destroy_repo(repo_name)
84 89 if updates.get('group'):
85 90 fixture.destroy_repo_group(updates['group'])
86 91
87 92 def test_api_update_repo_fork_of_field(self, backend):
88 93 master_repo = backend.create_repo()
89 94 repo = backend.create_repo()
90 95 updates = {
91 96 'fork_of': master_repo.repo_name
92 97 }
93 98 expected_api_data = repo.get_api_data(include_secrets=True)
94 99 expected_api_data.update(updates)
95 100
96 101 id_, params = build_data(
97 102 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
98 103 response = api_call(self.app, params)
99 104 expected = {
100 105 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
101 106 'repository': jsonify(expected_api_data)
102 107 }
103 108 assert_ok(id_, expected, given=response.body)
104 109 result = response.json['result']['repository']
105 110 assert result['fork_of'] == master_repo.repo_name
106 111
107 112 def test_api_update_repo_fork_of_not_found(self, backend):
108 113 master_repo_name = 'fake-parent-repo'
109 114 repo = backend.create_repo()
110 115 updates = {
111 116 'fork_of': master_repo_name
112 117 }
113 118 id_, params = build_data(
114 119 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
115 120 response = api_call(self.app, params)
116 121 expected = 'repository `{}` does not exist'.format(master_repo_name)
117 122 assert_error(id_, expected, given=response.body)
118 123
119 124 def test_api_update_repo_with_repo_group_not_existing(self):
120 125 repo_name = 'admin_owned'
121 126 fixture.create_repo(repo_name)
122 127 updates = {'group': 'test_group_for_update'}
123 128 id_, params = build_data(
124 129 self.apikey, 'update_repo', repoid=repo_name, **updates)
125 130 response = api_call(self.app, params)
126 131 try:
127 132 expected = 'repository group `%s` does not exist' % (
128 133 updates['group'],)
129 134 assert_error(id_, expected, given=response.body)
130 135 finally:
131 136 fixture.destroy_repo(repo_name)
132 137
133 138 def test_api_update_repo_regular_user_not_allowed(self):
134 139 repo_name = 'admin_owned'
135 140 fixture.create_repo(repo_name)
136 141 updates = {'active': False}
137 142 id_, params = build_data(
138 143 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
139 144 response = api_call(self.app, params)
140 145 try:
141 146 expected = 'repository `%s` does not exist' % (repo_name,)
142 147 assert_error(id_, expected, given=response.body)
143 148 finally:
144 149 fixture.destroy_repo(repo_name)
145 150
146 151 @mock.patch.object(RepoModel, 'update', crash)
147 152 def test_api_update_repo_exception_occurred(self, backend):
148 153 repo_name = UPDATE_REPO_NAME
149 154 fixture.create_repo(repo_name, repo_type=backend.alias)
150 155 id_, params = build_data(
151 156 self.apikey, 'update_repo', repoid=repo_name,
152 157 owner=TEST_USER_ADMIN_LOGIN,)
153 158 response = api_call(self.app, params)
154 159 try:
155 160 expected = 'failed to update repo `%s`' % (repo_name,)
156 161 assert_error(id_, expected, given=response.body)
157 162 finally:
158 163 fixture.destroy_repo(repo_name)
@@ -1,846 +1,847 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 pull requests controller for rhodecode for initializing pull requests
23 23 """
24 24
25 25 import formencode
26 26 import logging
27 27
28 28 from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest
29 29 from pylons import request, tmpl_context as c, url
30 30 from pylons.controllers.util import redirect
31 31 from pylons.i18n.translation import _
32 32 from sqlalchemy.sql import func
33 33 from sqlalchemy.sql.expression import or_
34 34
35 35 from rhodecode.lib import auth, diffs, helpers as h
36 36 from rhodecode.lib.ext_json import json
37 37 from rhodecode.lib.base import (
38 38 BaseRepoController, render, vcs_operation_context)
39 39 from rhodecode.lib.auth import (
40 40 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
41 41 HasAcceptedRepoType, XHRRequired)
42 42 from rhodecode.lib.utils import jsonify
43 43 from rhodecode.lib.utils2 import safe_int, safe_str, str2bool, safe_unicode
44 44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError)
47 47 from rhodecode.lib.diffs import LimitedDiffContainer
48 48 from rhodecode.model.changeset_status import ChangesetStatusModel
49 49 from rhodecode.model.comment import ChangesetCommentsModel
50 50 from rhodecode.model.db import PullRequest, ChangesetStatus, ChangesetComment, \
51 51 Repository
52 52 from rhodecode.model.forms import PullRequestForm
53 53 from rhodecode.model.meta import Session
54 54 from rhodecode.model.pull_request import PullRequestModel
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 class PullrequestsController(BaseRepoController):
60 60 def __before__(self):
61 61 super(PullrequestsController, self).__before__()
62 62
63 63 def _load_compare_data(self, pull_request, enable_comments=True):
64 64 """
65 65 Load context data needed for generating compare diff
66 66
67 67 :param pull_request: object related to the request
68 68 :param enable_comments: flag to determine if comments are included
69 69 """
70 70 source_repo = pull_request.source_repo
71 71 source_ref_id = pull_request.source_ref_parts.commit_id
72 72
73 73 target_repo = pull_request.target_repo
74 74 target_ref_id = pull_request.target_ref_parts.commit_id
75 75
76 76 # despite opening commits for bookmarks/branches/tags, we always
77 77 # convert this to rev to prevent changes after bookmark or branch change
78 78 c.source_ref_type = 'rev'
79 79 c.source_ref = source_ref_id
80 80
81 81 c.target_ref_type = 'rev'
82 82 c.target_ref = target_ref_id
83 83
84 84 c.source_repo = source_repo
85 85 c.target_repo = target_repo
86 86
87 87 c.fulldiff = bool(request.GET.get('fulldiff'))
88 88
89 89 # diff_limit is the old behavior, will cut off the whole diff
90 90 # if the limit is applied otherwise will just hide the
91 91 # big files from the front-end
92 92 diff_limit = self.cut_off_limit_diff
93 93 file_limit = self.cut_off_limit_file
94 94
95 95 pre_load = ["author", "branch", "date", "message"]
96 96
97 97 c.commit_ranges = []
98 98 source_commit = EmptyCommit()
99 99 target_commit = EmptyCommit()
100 100 c.missing_requirements = False
101 101 try:
102 102 c.commit_ranges = [
103 103 source_repo.get_commit(commit_id=rev, pre_load=pre_load)
104 104 for rev in pull_request.revisions]
105 105
106 106 c.statuses = source_repo.statuses(
107 107 [x.raw_id for x in c.commit_ranges])
108 108
109 109 target_commit = source_repo.get_commit(
110 110 commit_id=safe_str(target_ref_id))
111 111 source_commit = source_repo.get_commit(
112 112 commit_id=safe_str(source_ref_id))
113 113 except RepositoryRequirementError:
114 114 c.missing_requirements = True
115 115
116 116 c.missing_commits = False
117 117 if (c.missing_requirements or
118 118 isinstance(source_commit, EmptyCommit) or
119 119 source_commit == target_commit):
120 120 _parsed = []
121 121 c.missing_commits = True
122 122 else:
123 123 vcs_diff = PullRequestModel().get_diff(pull_request)
124 124 diff_processor = diffs.DiffProcessor(
125 125 vcs_diff, format='gitdiff', diff_limit=diff_limit,
126 126 file_limit=file_limit, show_full_diff=c.fulldiff)
127 127 _parsed = diff_processor.prepare()
128 128
129 129 c.limited_diff = isinstance(_parsed, LimitedDiffContainer)
130 130
131 131 c.files = []
132 132 c.changes = {}
133 133 c.lines_added = 0
134 134 c.lines_deleted = 0
135 135 c.included_files = []
136 136 c.deleted_files = []
137 137
138 138 for f in _parsed:
139 139 st = f['stats']
140 140 c.lines_added += st['added']
141 141 c.lines_deleted += st['deleted']
142 142
143 143 fid = h.FID('', f['filename'])
144 144 c.files.append([fid, f['operation'], f['filename'], f['stats']])
145 145 c.included_files.append(f['filename'])
146 146 html_diff = diff_processor.as_html(enable_comments=enable_comments,
147 147 parsed_lines=[f])
148 148 c.changes[fid] = [f['operation'], f['filename'], html_diff, f]
149 149
150 150 def _extract_ordering(self, request):
151 151 column_index = safe_int(request.GET.get('order[0][column]'))
152 152 order_dir = request.GET.get('order[0][dir]', 'desc')
153 153 order_by = request.GET.get(
154 154 'columns[%s][data][sort]' % column_index, 'name_raw')
155 155 return order_by, order_dir
156 156
157 157 @LoginRequired()
158 158 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
159 159 'repository.admin')
160 160 @HasAcceptedRepoType('git', 'hg')
161 161 def show_all(self, repo_name):
162 162 # filter types
163 163 c.active = 'open'
164 164 c.source = str2bool(request.GET.get('source'))
165 165 c.closed = str2bool(request.GET.get('closed'))
166 166 c.my = str2bool(request.GET.get('my'))
167 167 c.awaiting_review = str2bool(request.GET.get('awaiting_review'))
168 168 c.awaiting_my_review = str2bool(request.GET.get('awaiting_my_review'))
169 169 c.repo_name = repo_name
170 170
171 171 opened_by = None
172 172 if c.my:
173 173 c.active = 'my'
174 174 opened_by = [c.rhodecode_user.user_id]
175 175
176 176 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
177 177 if c.closed:
178 178 c.active = 'closed'
179 179 statuses = [PullRequest.STATUS_CLOSED]
180 180
181 181 if c.awaiting_review and not c.source:
182 182 c.active = 'awaiting'
183 183 if c.source and not c.awaiting_review:
184 184 c.active = 'source'
185 185 if c.awaiting_my_review:
186 186 c.active = 'awaiting_my'
187 187
188 188 data = self._get_pull_requests_list(
189 189 repo_name=repo_name, opened_by=opened_by, statuses=statuses)
190 190 if not request.is_xhr:
191 191 c.data = json.dumps(data['data'])
192 192 c.records_total = data['recordsTotal']
193 193 return render('/pullrequests/pullrequests.html')
194 194 else:
195 195 return json.dumps(data)
196 196
197 197 def _get_pull_requests_list(self, repo_name, opened_by, statuses):
198 198 # pagination
199 199 start = safe_int(request.GET.get('start'), 0)
200 200 length = safe_int(request.GET.get('length'), c.visual.dashboard_items)
201 201 order_by, order_dir = self._extract_ordering(request)
202 202
203 203 if c.awaiting_review:
204 204 pull_requests = PullRequestModel().get_awaiting_review(
205 205 repo_name, source=c.source, opened_by=opened_by,
206 206 statuses=statuses, offset=start, length=length,
207 207 order_by=order_by, order_dir=order_dir)
208 208 pull_requests_total_count = PullRequestModel(
209 209 ).count_awaiting_review(
210 210 repo_name, source=c.source, statuses=statuses,
211 211 opened_by=opened_by)
212 212 elif c.awaiting_my_review:
213 213 pull_requests = PullRequestModel().get_awaiting_my_review(
214 214 repo_name, source=c.source, opened_by=opened_by,
215 215 user_id=c.rhodecode_user.user_id, statuses=statuses,
216 216 offset=start, length=length, order_by=order_by,
217 217 order_dir=order_dir)
218 218 pull_requests_total_count = PullRequestModel(
219 219 ).count_awaiting_my_review(
220 220 repo_name, source=c.source, user_id=c.rhodecode_user.user_id,
221 221 statuses=statuses, opened_by=opened_by)
222 222 else:
223 223 pull_requests = PullRequestModel().get_all(
224 224 repo_name, source=c.source, opened_by=opened_by,
225 225 statuses=statuses, offset=start, length=length,
226 226 order_by=order_by, order_dir=order_dir)
227 227 pull_requests_total_count = PullRequestModel().count_all(
228 228 repo_name, source=c.source, statuses=statuses,
229 229 opened_by=opened_by)
230 230
231 231 from rhodecode.lib.utils import PartialRenderer
232 232 _render = PartialRenderer('data_table/_dt_elements.html')
233 233 data = []
234 234 for pr in pull_requests:
235 235 comments = ChangesetCommentsModel().get_all_comments(
236 236 c.rhodecode_db_repo.repo_id, pull_request=pr)
237 237
238 238 data.append({
239 239 'name': _render('pullrequest_name',
240 240 pr.pull_request_id, pr.target_repo.repo_name),
241 241 'name_raw': pr.pull_request_id,
242 242 'status': _render('pullrequest_status',
243 243 pr.calculated_review_status()),
244 244 'title': _render(
245 245 'pullrequest_title', pr.title, pr.description),
246 246 'description': h.escape(pr.description),
247 247 'updated_on': _render('pullrequest_updated_on',
248 248 h.datetime_to_time(pr.updated_on)),
249 249 'updated_on_raw': h.datetime_to_time(pr.updated_on),
250 250 'created_on': _render('pullrequest_updated_on',
251 251 h.datetime_to_time(pr.created_on)),
252 252 'created_on_raw': h.datetime_to_time(pr.created_on),
253 253 'author': _render('pullrequest_author',
254 254 pr.author.full_contact, ),
255 255 'author_raw': pr.author.full_name,
256 256 'comments': _render('pullrequest_comments', len(comments)),
257 257 'comments_raw': len(comments),
258 258 'closed': pr.is_closed(),
259 259 })
260 260 # json used to render the grid
261 261 data = ({
262 262 'data': data,
263 263 'recordsTotal': pull_requests_total_count,
264 264 'recordsFiltered': pull_requests_total_count,
265 265 })
266 266 return data
267 267
268 268 @LoginRequired()
269 269 @NotAnonymous()
270 270 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
271 271 'repository.admin')
272 272 @HasAcceptedRepoType('git', 'hg')
273 273 def index(self):
274 274 source_repo = c.rhodecode_db_repo
275 275
276 276 try:
277 277 source_repo.scm_instance().get_commit()
278 278 except EmptyRepositoryError:
279 279 h.flash(h.literal(_('There are no commits yet')),
280 280 category='warning')
281 281 redirect(url('summary_home', repo_name=source_repo.repo_name))
282 282
283 283 commit_id = request.GET.get('commit')
284 284 branch_ref = request.GET.get('branch')
285 285 bookmark_ref = request.GET.get('bookmark')
286 286
287 287 try:
288 288 source_repo_data = PullRequestModel().generate_repo_data(
289 289 source_repo, commit_id=commit_id,
290 290 branch=branch_ref, bookmark=bookmark_ref)
291 291 except CommitDoesNotExistError as e:
292 292 log.exception(e)
293 293 h.flash(_('Commit does not exist'), 'error')
294 294 redirect(url('pullrequest_home', repo_name=source_repo.repo_name))
295 295
296 296 default_target_repo = source_repo
297 297 if (source_repo.parent and
298 298 not source_repo.parent.scm_instance().is_empty()):
299 299 # change default if we have a parent repo
300 300 default_target_repo = source_repo.parent
301 301
302 302 target_repo_data = PullRequestModel().generate_repo_data(
303 303 default_target_repo)
304 304
305 305 selected_source_ref = source_repo_data['refs']['selected_ref']
306 306
307 307 title_source_ref = selected_source_ref.split(':', 2)[1]
308 308 c.default_title = PullRequestModel().generate_pullrequest_title(
309 309 source=source_repo.repo_name,
310 310 source_ref=title_source_ref,
311 311 target=default_target_repo.repo_name
312 312 )
313 313
314 314 c.default_repo_data = {
315 315 'source_repo_name': source_repo.repo_name,
316 316 'source_refs_json': json.dumps(source_repo_data),
317 317 'target_repo_name': default_target_repo.repo_name,
318 318 'target_refs_json': json.dumps(target_repo_data),
319 319 }
320 320 c.default_source_ref = selected_source_ref
321 321
322 322 return render('/pullrequests/pullrequest.html')
323 323
324 324 @LoginRequired()
325 325 @NotAnonymous()
326 326 @XHRRequired()
327 327 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
328 328 'repository.admin')
329 329 @jsonify
330 330 def get_repo_refs(self, repo_name, target_repo_name):
331 331 repo = Repository.get_by_repo_name(target_repo_name)
332 332 if not repo:
333 333 raise HTTPNotFound
334 334 return PullRequestModel().generate_repo_data(repo)
335 335
336 336 @LoginRequired()
337 337 @NotAnonymous()
338 338 @XHRRequired()
339 339 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
340 340 'repository.admin')
341 341 @jsonify
342 342 def get_repo_destinations(self, repo_name):
343 343 repo = Repository.get_by_repo_name(repo_name)
344 344 if not repo:
345 345 raise HTTPNotFound
346 346 filter_query = request.GET.get('query')
347 347
348 348 query = Repository.query() \
349 349 .order_by(func.length(Repository.repo_name)) \
350 350 .filter(or_(
351 351 Repository.repo_name == repo.repo_name,
352 352 Repository.fork_id == repo.repo_id))
353 353
354 354 if filter_query:
355 355 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
356 356 query = query.filter(
357 357 Repository.repo_name.ilike(ilike_expression))
358 358
359 359 add_parent = False
360 360 if repo.parent:
361 361 if filter_query in repo.parent.repo_name:
362 362 if not repo.parent.scm_instance().is_empty():
363 363 add_parent = True
364 364
365 365 limit = 20 - 1 if add_parent else 20
366 366 all_repos = query.limit(limit).all()
367 367 if add_parent:
368 368 all_repos += [repo.parent]
369 369
370 370 repos = []
371 371 for obj in self.scm_model.get_repos(all_repos):
372 372 repos.append({
373 373 'id': obj['name'],
374 374 'text': obj['name'],
375 375 'type': 'repo',
376 376 'obj': obj['dbrepo']
377 377 })
378 378
379 379 data = {
380 380 'more': False,
381 381 'results': [{
382 382 'text': _('Repositories'),
383 383 'children': repos
384 384 }] if repos else []
385 385 }
386 386 return data
387 387
388 388 @LoginRequired()
389 389 @NotAnonymous()
390 390 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
391 391 'repository.admin')
392 392 @HasAcceptedRepoType('git', 'hg')
393 393 @auth.CSRFRequired()
394 394 def create(self, repo_name):
395 395 repo = Repository.get_by_repo_name(repo_name)
396 396 if not repo:
397 397 raise HTTPNotFound
398 398
399 399 try:
400 400 _form = PullRequestForm(repo.repo_id)().to_python(request.POST)
401 401 except formencode.Invalid as errors:
402 402 if errors.error_dict.get('revisions'):
403 403 msg = 'Revisions: %s' % errors.error_dict['revisions']
404 404 elif errors.error_dict.get('pullrequest_title'):
405 405 msg = _('Pull request requires a title with min. 3 chars')
406 406 else:
407 407 msg = _('Error creating pull request: {}').format(errors)
408 408 log.exception(msg)
409 409 h.flash(msg, 'error')
410 410
411 411 # would rather just go back to form ...
412 412 return redirect(url('pullrequest_home', repo_name=repo_name))
413 413
414 414 source_repo = _form['source_repo']
415 415 source_ref = _form['source_ref']
416 416 target_repo = _form['target_repo']
417 417 target_ref = _form['target_ref']
418 418 commit_ids = _form['revisions'][::-1]
419 419 reviewers = _form['review_members']
420 420
421 421 # find the ancestor for this pr
422 422 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
423 423 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
424 424
425 425 source_scm = source_db_repo.scm_instance()
426 426 target_scm = target_db_repo.scm_instance()
427 427
428 428 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
429 429 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
430 430
431 431 ancestor = source_scm.get_common_ancestor(
432 432 source_commit.raw_id, target_commit.raw_id, target_scm)
433 433
434 434 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
435 435 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
436 436
437 437 pullrequest_title = _form['pullrequest_title']
438 438 title_source_ref = source_ref.split(':', 2)[1]
439 439 if not pullrequest_title:
440 440 pullrequest_title = PullRequestModel().generate_pullrequest_title(
441 441 source=source_repo,
442 442 source_ref=title_source_ref,
443 443 target=target_repo
444 444 )
445 445
446 446 description = _form['pullrequest_desc']
447 447 try:
448 448 pull_request = PullRequestModel().create(
449 449 c.rhodecode_user.user_id, source_repo, source_ref, target_repo,
450 450 target_ref, commit_ids, reviewers, pullrequest_title,
451 451 description
452 452 )
453 453 Session().commit()
454 454 h.flash(_('Successfully opened new pull request'),
455 455 category='success')
456 456 except Exception as e:
457 raise
457 458 msg = _('Error occurred during sending pull request')
458 459 log.exception(msg)
459 460 h.flash(msg, category='error')
460 461 return redirect(url('pullrequest_home', repo_name=repo_name))
461 462
462 463 return redirect(url('pullrequest_show', repo_name=target_repo,
463 464 pull_request_id=pull_request.pull_request_id))
464 465
465 466 @LoginRequired()
466 467 @NotAnonymous()
467 468 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
468 469 'repository.admin')
469 470 @auth.CSRFRequired()
470 471 @jsonify
471 472 def update(self, repo_name, pull_request_id):
472 473 pull_request_id = safe_int(pull_request_id)
473 474 pull_request = PullRequest.get_or_404(pull_request_id)
474 475 # only owner or admin can update it
475 476 allowed_to_update = PullRequestModel().check_user_update(
476 477 pull_request, c.rhodecode_user)
477 478 if allowed_to_update:
478 479 if 'reviewers_ids' in request.POST:
479 480 self._update_reviewers(pull_request_id)
480 481 elif str2bool(request.POST.get('update_commits', 'false')):
481 482 self._update_commits(pull_request)
482 483 elif str2bool(request.POST.get('close_pull_request', 'false')):
483 484 self._reject_close(pull_request)
484 485 elif str2bool(request.POST.get('edit_pull_request', 'false')):
485 486 self._edit_pull_request(pull_request)
486 487 else:
487 488 raise HTTPBadRequest()
488 489 return True
489 490 raise HTTPForbidden()
490 491
491 492 def _edit_pull_request(self, pull_request):
492 493 try:
493 494 PullRequestModel().edit(
494 495 pull_request, request.POST.get('title'),
495 496 request.POST.get('description'))
496 497 except ValueError:
497 498 msg = _(u'Cannot update closed pull requests.')
498 499 h.flash(msg, category='error')
499 500 return
500 501 else:
501 502 Session().commit()
502 503
503 504 msg = _(u'Pull request title & description updated.')
504 505 h.flash(msg, category='success')
505 506 return
506 507
507 508 def _update_commits(self, pull_request):
508 509 try:
509 510 if PullRequestModel().has_valid_update_type(pull_request):
510 511 updated_version, changes = PullRequestModel().update_commits(
511 512 pull_request)
512 513 if updated_version:
513 514 msg = _(
514 515 u'Pull request updated to "{source_commit_id}" with '
515 516 u'{count_added} added, {count_removed} removed '
516 517 u'commits.'
517 518 ).format(
518 519 source_commit_id=pull_request.source_ref_parts.commit_id,
519 520 count_added=len(changes.added),
520 521 count_removed=len(changes.removed))
521 522 h.flash(msg, category='success')
522 523 else:
523 524 h.flash(_("Nothing changed in pull request."),
524 525 category='warning')
525 526 else:
526 527 msg = _(
527 528 u"Skipping update of pull request due to reference "
528 529 u"type: {reference_type}"
529 530 ).format(reference_type=pull_request.source_ref_parts.type)
530 531 h.flash(msg, category='warning')
531 532 except CommitDoesNotExistError:
532 533 h.flash(
533 534 _(u'Update failed due to missing commits.'), category='error')
534 535
535 536 @auth.CSRFRequired()
536 537 @LoginRequired()
537 538 @NotAnonymous()
538 539 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
539 540 'repository.admin')
540 541 def merge(self, repo_name, pull_request_id):
541 542 """
542 543 POST /{repo_name}/pull-request/{pull_request_id}
543 544
544 545 Merge will perform a server-side merge of the specified
545 546 pull request, if the pull request is approved and mergeable.
546 547 After successful merging, the pull request is automatically
547 548 closed, with a relevant comment.
548 549 """
549 550 pull_request_id = safe_int(pull_request_id)
550 551 pull_request = PullRequest.get_or_404(pull_request_id)
551 552 user = c.rhodecode_user
552 553
553 554 if self._meets_merge_pre_conditions(pull_request, user):
554 555 log.debug("Pre-conditions checked, trying to merge.")
555 556 extras = vcs_operation_context(
556 557 request.environ, repo_name=pull_request.target_repo.repo_name,
557 558 username=user.username, action='push',
558 559 scm=pull_request.target_repo.repo_type)
559 560 self._merge_pull_request(pull_request, user, extras)
560 561
561 562 return redirect(url(
562 563 'pullrequest_show',
563 564 repo_name=pull_request.target_repo.repo_name,
564 565 pull_request_id=pull_request.pull_request_id))
565 566
566 567 def _meets_merge_pre_conditions(self, pull_request, user):
567 568 if not PullRequestModel().check_user_merge(pull_request, user):
568 569 raise HTTPForbidden()
569 570
570 571 merge_status, msg = PullRequestModel().merge_status(pull_request)
571 572 if not merge_status:
572 573 log.debug("Cannot merge, not mergeable.")
573 574 h.flash(msg, category='error')
574 575 return False
575 576
576 577 if (pull_request.calculated_review_status()
577 578 is not ChangesetStatus.STATUS_APPROVED):
578 579 log.debug("Cannot merge, approval is pending.")
579 580 msg = _('Pull request reviewer approval is pending.')
580 581 h.flash(msg, category='error')
581 582 return False
582 583 return True
583 584
584 585 def _merge_pull_request(self, pull_request, user, extras):
585 586 merge_resp = PullRequestModel().merge(
586 587 pull_request, user, extras=extras)
587 588
588 589 if merge_resp.executed:
589 590 log.debug("The merge was successful, closing the pull request.")
590 591 PullRequestModel().close_pull_request(
591 592 pull_request.pull_request_id, user)
592 593 Session().commit()
593 594 msg = _('Pull request was successfully merged and closed.')
594 595 h.flash(msg, category='success')
595 596 else:
596 597 log.debug(
597 598 "The merge was not successful. Merge response: %s",
598 599 merge_resp)
599 600 msg = PullRequestModel().merge_status_message(
600 601 merge_resp.failure_reason)
601 602 h.flash(msg, category='error')
602 603
603 604 def _update_reviewers(self, pull_request_id):
604 605 reviewers_ids = map(int, filter(
605 606 lambda v: v not in [None, ''],
606 607 request.POST.get('reviewers_ids', '').split(',')))
607 608 PullRequestModel().update_reviewers(pull_request_id, reviewers_ids)
608 609 Session().commit()
609 610
610 611 def _reject_close(self, pull_request):
611 612 if pull_request.is_closed():
612 613 raise HTTPForbidden()
613 614
614 615 PullRequestModel().close_pull_request_with_comment(
615 616 pull_request, c.rhodecode_user, c.rhodecode_db_repo)
616 617 Session().commit()
617 618
618 619 @LoginRequired()
619 620 @NotAnonymous()
620 621 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
621 622 'repository.admin')
622 623 @auth.CSRFRequired()
623 624 @jsonify
624 625 def delete(self, repo_name, pull_request_id):
625 626 pull_request_id = safe_int(pull_request_id)
626 627 pull_request = PullRequest.get_or_404(pull_request_id)
627 628 # only owner can delete it !
628 629 if pull_request.author.user_id == c.rhodecode_user.user_id:
629 630 PullRequestModel().delete(pull_request)
630 631 Session().commit()
631 632 h.flash(_('Successfully deleted pull request'),
632 633 category='success')
633 634 return redirect(url('my_account_pullrequests'))
634 635 raise HTTPForbidden()
635 636
636 637 @LoginRequired()
637 638 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
638 639 'repository.admin')
639 640 def show(self, repo_name, pull_request_id):
640 641 pull_request_id = safe_int(pull_request_id)
641 642 c.pull_request = PullRequest.get_or_404(pull_request_id)
642 643
643 644 # pull_requests repo_name we opened it against
644 645 # ie. target_repo must match
645 646 if repo_name != c.pull_request.target_repo.repo_name:
646 647 raise HTTPNotFound
647 648
648 649 c.allowed_to_change_status = PullRequestModel(). \
649 650 check_user_change_status(c.pull_request, c.rhodecode_user)
650 651 c.allowed_to_update = PullRequestModel().check_user_update(
651 652 c.pull_request, c.rhodecode_user) and not c.pull_request.is_closed()
652 653 c.allowed_to_merge = PullRequestModel().check_user_merge(
653 654 c.pull_request, c.rhodecode_user) and not c.pull_request.is_closed()
654 655
655 656 cc_model = ChangesetCommentsModel()
656 657
657 658 c.pull_request_reviewers = c.pull_request.reviewers_statuses()
658 659
659 660 c.pull_request_review_status = c.pull_request.calculated_review_status()
660 661 c.pr_merge_status, c.pr_merge_msg = PullRequestModel().merge_status(
661 662 c.pull_request)
662 663 c.approval_msg = None
663 664 if c.pull_request_review_status != ChangesetStatus.STATUS_APPROVED:
664 665 c.approval_msg = _('Reviewer approval is pending.')
665 666 c.pr_merge_status = False
666 667 # load compare data into template context
667 668 enable_comments = not c.pull_request.is_closed()
668 669 self._load_compare_data(c.pull_request, enable_comments=enable_comments)
669 670
670 671 # this is a hack to properly display links, when creating PR, the
671 672 # compare view and others uses different notation, and
672 673 # compare_commits.html renders links based on the target_repo.
673 674 # We need to swap that here to generate it properly on the html side
674 675 c.target_repo = c.source_repo
675 676
676 677 # inline comments
677 678 c.inline_cnt = 0
678 679 c.inline_comments = cc_model.get_inline_comments(
679 680 c.rhodecode_db_repo.repo_id,
680 681 pull_request=pull_request_id).items()
681 682 # count inline comments
682 683 for __, lines in c.inline_comments:
683 684 for comments in lines.values():
684 685 c.inline_cnt += len(comments)
685 686
686 687 # outdated comments
687 688 c.outdated_cnt = 0
688 689 if ChangesetCommentsModel.use_outdated_comments(c.pull_request):
689 690 c.outdated_comments = cc_model.get_outdated_comments(
690 691 c.rhodecode_db_repo.repo_id,
691 692 pull_request=c.pull_request)
692 693 # Count outdated comments and check for deleted files
693 694 for file_name, lines in c.outdated_comments.iteritems():
694 695 for comments in lines.values():
695 696 c.outdated_cnt += len(comments)
696 697 if file_name not in c.included_files:
697 698 c.deleted_files.append(file_name)
698 699 else:
699 700 c.outdated_comments = {}
700 701
701 702 # comments
702 703 c.comments = cc_model.get_comments(c.rhodecode_db_repo.repo_id,
703 704 pull_request=pull_request_id)
704 705
705 706 if c.allowed_to_update:
706 707 force_close = ('forced_closed', _('Close Pull Request'))
707 708 statuses = ChangesetStatus.STATUSES + [force_close]
708 709 else:
709 710 statuses = ChangesetStatus.STATUSES
710 711 c.commit_statuses = statuses
711 712
712 713 c.ancestor = None # TODO: add ancestor here
713 714
714 715 return render('/pullrequests/pullrequest_show.html')
715 716
716 717 @LoginRequired()
717 718 @NotAnonymous()
718 719 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
719 720 'repository.admin')
720 721 @auth.CSRFRequired()
721 722 @jsonify
722 723 def comment(self, repo_name, pull_request_id):
723 724 pull_request_id = safe_int(pull_request_id)
724 725 pull_request = PullRequest.get_or_404(pull_request_id)
725 726 if pull_request.is_closed():
726 727 raise HTTPForbidden()
727 728
728 729 # TODO: johbo: Re-think this bit, "approved_closed" does not exist
729 730 # as a changeset status, still we want to send it in one value.
730 731 status = request.POST.get('changeset_status', None)
731 732 text = request.POST.get('text')
732 733 if status and '_closed' in status:
733 734 close_pr = True
734 735 status = status.replace('_closed', '')
735 736 else:
736 737 close_pr = False
737 738
738 739 forced = (status == 'forced')
739 740 if forced:
740 741 status = 'rejected'
741 742
742 743 allowed_to_change_status = PullRequestModel().check_user_change_status(
743 744 pull_request, c.rhodecode_user)
744 745
745 746 if status and allowed_to_change_status:
746 747 message = (_('Status change %(transition_icon)s %(status)s')
747 748 % {'transition_icon': '>',
748 749 'status': ChangesetStatus.get_status_lbl(status)})
749 750 if close_pr:
750 751 message = _('Closing with') + ' ' + message
751 752 text = text or message
752 753 comm = ChangesetCommentsModel().create(
753 754 text=text,
754 755 repo=c.rhodecode_db_repo.repo_id,
755 756 user=c.rhodecode_user.user_id,
756 757 pull_request=pull_request_id,
757 758 f_path=request.POST.get('f_path'),
758 759 line_no=request.POST.get('line'),
759 760 status_change=(ChangesetStatus.get_status_lbl(status)
760 761 if status and allowed_to_change_status else None),
761 762 closing_pr=close_pr
762 763 )
763 764
764 765 if allowed_to_change_status:
765 766 old_calculated_status = pull_request.calculated_review_status()
766 767 # get status if set !
767 768 if status:
768 769 ChangesetStatusModel().set_status(
769 770 c.rhodecode_db_repo.repo_id,
770 771 status,
771 772 c.rhodecode_user.user_id,
772 773 comm,
773 774 pull_request=pull_request_id
774 775 )
775 776
776 777 Session().flush()
777 778 # we now calculate the status of pull request, and based on that
778 779 # calculation we set the commits status
779 780 calculated_status = pull_request.calculated_review_status()
780 781 if old_calculated_status != calculated_status:
781 782 PullRequestModel()._trigger_pull_request_hook(
782 783 pull_request, c.rhodecode_user, 'review_status_change')
783 784
784 785 calculated_status_lbl = ChangesetStatus.get_status_lbl(
785 786 calculated_status)
786 787
787 788 if close_pr:
788 789 status_completed = (
789 790 calculated_status in [ChangesetStatus.STATUS_APPROVED,
790 791 ChangesetStatus.STATUS_REJECTED])
791 792 if forced or status_completed:
792 793 PullRequestModel().close_pull_request(
793 794 pull_request_id, c.rhodecode_user)
794 795 else:
795 796 h.flash(_('Closing pull request on other statuses than '
796 797 'rejected or approved is forbidden. '
797 798 'Calculated status from all reviewers '
798 799 'is currently: %s') % calculated_status_lbl,
799 800 category='warning')
800 801
801 802 Session().commit()
802 803
803 804 if not request.is_xhr:
804 805 return redirect(h.url('pullrequest_show', repo_name=repo_name,
805 806 pull_request_id=pull_request_id))
806 807
807 808 data = {
808 809 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
809 810 }
810 811 if comm:
811 812 c.co = comm
812 813 data.update(comm.get_dict())
813 814 data.update({'rendered_text':
814 815 render('changeset/changeset_comment_block.html')})
815 816
816 817 return data
817 818
818 819 @LoginRequired()
819 820 @NotAnonymous()
820 821 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
821 822 'repository.admin')
822 823 @auth.CSRFRequired()
823 824 @jsonify
824 825 def delete_comment(self, repo_name, comment_id):
825 826 return self._delete_comment(comment_id)
826 827
827 828 def _delete_comment(self, comment_id):
828 829 comment_id = safe_int(comment_id)
829 830 co = ChangesetComment.get_or_404(comment_id)
830 831 if co.pull_request.is_closed():
831 832 # don't allow deleting comments on closed pull request
832 833 raise HTTPForbidden()
833 834
834 835 is_owner = co.author.user_id == c.rhodecode_user.user_id
835 836 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
836 837 if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner:
837 838 old_calculated_status = co.pull_request.calculated_review_status()
838 839 ChangesetCommentsModel().delete(comment=co)
839 840 Session().commit()
840 841 calculated_status = co.pull_request.calculated_review_status()
841 842 if old_calculated_status != calculated_status:
842 843 PullRequestModel()._trigger_pull_request_hook(
843 844 co.pull_request, c.rhodecode_user, 'review_status_change')
844 845 return True
845 846 else:
846 847 raise HTTPForbidden()
@@ -1,59 +1,57 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from pyramid.threadlocal import get_current_registry
20 20
21 21
22 class RhodecodeEvent(object):
23 """
24 Base event class for all Rhodecode events
25 """
26
27
28 22 def trigger(event):
29 23 """
30 24 Helper method to send an event. This wraps the pyramid logic to send an
31 25 event.
32 26 """
33 27 # For the first step we are using pyramids thread locals here. If the
34 28 # event mechanism works out as a good solution we should think about
35 29 # passing the registry as an argument to get rid of it.
36 30 registry = get_current_registry()
37 31 registry.notify(event)
38 32
39 33
34 from rhodecode.events.base import RhodecodeEvent
35
40 36 from rhodecode.events.user import (
41 37 UserPreCreate,
42 38 UserPreUpdate,
43 39 UserRegistered
44 40 )
45 41
46 42 from rhodecode.events.repo import (
43 RepoEvent,
47 44 RepoPreCreateEvent, RepoCreatedEvent,
48 45 RepoPreDeleteEvent, RepoDeletedEvent,
49 46 RepoPrePushEvent, RepoPushEvent,
50 47 RepoPrePullEvent, RepoPullEvent,
51 48 )
52 49
53 50 from rhodecode.events.pullrequest import (
51 PullRequestEvent,
54 52 PullRequestCreateEvent,
55 53 PullRequestUpdateEvent,
56 54 PullRequestReviewEvent,
57 55 PullRequestMergeEvent,
58 56 PullRequestCloseEvent,
59 57 ) No newline at end of file
@@ -1,72 +1,97 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 from marshmallow import Schema, fields
20
19 21 from rhodecode.events.repo import RepoEvent
20 22
21 23
24 def get_pull_request_url(pull_request):
25 from rhodecode.model.pull_request import PullRequestModel
26 return PullRequestModel().get_url(pull_request)
27
28
29 class PullRequestSchema(Schema):
30 """
31 Marshmallow schema for a pull request
32 """
33 pull_request_id = fields.Integer()
34 url = fields.Function(get_pull_request_url)
35 title = fields.Str()
36
37
38 class PullRequestEventSchema(RepoEvent.MarshmallowSchema):
39 """
40 Marshmallow schema for a pull request event
41 """
42 pullrequest = fields.Nested(PullRequestSchema)
43
44
class PullRequestEvent(RepoEvent):
    """
    Base class for pull request events.

    :param pullrequest: a :class:`PullRequest` instance
    """
    MarshmallowSchema = PullRequestEventSchema

    def __init__(self, pullrequest):
        # pull request events are always scoped to the target repository
        target = pullrequest.target_repo
        super(PullRequestEvent, self).__init__(target)
        self.pullrequest = pullrequest
31 56
32 57
33 58 class PullRequestCreateEvent(PullRequestEvent):
34 59 """
35 60 An instance of this class is emitted as an :term:`event` after a pull
36 61 request is created.
37 62 """
38 63 name = 'pullrequest-create'
39 64
40 65
41 66 class PullRequestCloseEvent(PullRequestEvent):
42 67 """
43 68 An instance of this class is emitted as an :term:`event` after a pull
44 69 request is closed.
45 70 """
46 71 name = 'pullrequest-close'
47 72
48 73
49 74 class PullRequestUpdateEvent(PullRequestEvent):
50 75 """
51 76 An instance of this class is emitted as an :term:`event` after a pull
52 77 request is updated.
53 78 """
54 79 name = 'pullrequest-update'
55 80
56 81
57 82 class PullRequestMergeEvent(PullRequestEvent):
58 83 """
59 84 An instance of this class is emitted as an :term:`event` after a pull
60 85 request is merged.
61 86 """
62 87 name = 'pullrequest-merge'
63 88
64 89
65 90 class PullRequestReviewEvent(PullRequestEvent):
66 91 """
67 92 An instance of this class is emitted as an :term:`event` after a pull
68 93 request is reviewed.
69 94 """
70 95 name = 'pullrequest-review'
71 96
72 97
@@ -1,113 +1,149 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 from marshmallow import Schema, fields
20
19 21 from rhodecode.model.db import Repository, Session
20 from rhodecode.events import RhodecodeEvent
22 from rhodecode.events.base import RhodecodeEvent
23
24
def get_repo_url(repo):
    """
    Return the canonical URL for *repo*.

    Imported lazily to avoid a circular import between the events and
    model packages at module load time.
    """
    from rhodecode.model.repo import RepoModel
    return RepoModel().get_url(repo)


# Backwards-compatible alias: the helper was originally (mis)named
# `get_pull_request_url`, copied from the pull-request event module,
# although it serializes a *repository* URL.
get_pull_request_url = get_repo_url


class RepositorySchema(Schema):
    """
    Marshmallow schema for a repository
    """
    repo_id = fields.Integer()
    repo_name = fields.Str()
    url = fields.Function(get_repo_url)


class RepoEventSchema(RhodecodeEvent.MarshmallowSchema):
    """
    Marshmallow schema for a repository event
    """
    repository = fields.Nested(RepositorySchema)
21 44
22 45
23 46 class RepoEvent(RhodecodeEvent):
24 47 """
25 48 Base class for events acting on a repository.
26 49
27 50 :param repo: a :class:`Repository` instance
28 51 """
52 MarshmallowSchema = RepoEventSchema
53
29 54 def __init__(self, repo):
55 super(RepoEvent, self).__init__()
30 56 self.repo = repo
31 57
32 58
33 59 class RepoPreCreateEvent(RepoEvent):
34 60 """
35 61 An instance of this class is emitted as an :term:`event` before a repo is
36 62 created.
37 63 """
38 64 name = 'repo-pre-create'
39 65
40 66
41 67 class RepoCreatedEvent(RepoEvent):
42 68 """
43 69 An instance of this class is emitted as an :term:`event` whenever a repo is
44 70 created.
45 71 """
46 72 name = 'repo-created'
47 73
48 74
49 75 class RepoPreDeleteEvent(RepoEvent):
50 76 """
51 77 An instance of this class is emitted as an :term:`event` before a repo is
52 78 deleted.
53 79 """
54 80 name = 'repo-pre-delete'
55 81
56 82
57 83 class RepoDeletedEvent(RepoEvent):
58 84 """
59 85 An instance of this class is emitted as an :term:`event` whenever a repo is
60 86 deleted.
61 87 """
62 88 name = 'repo-deleted'
63 89
64 90
class RepoVCSEvent(RepoEvent):
    """
    Base class for events triggered by the VCS

    :param repo_name: name of the repository the VCS action targets
    :param extras: dict of data proxied from the VCS hooks
                   (expected to carry at least 'username' and 'ip' —
                   TODO confirm against the hook callers)
    """
    def __init__(self, repo_name, extras):
        self.repo = Repository.get_by_repo_name(repo_name)
        if not self.repo:
            raise Exception('repo by this name %s does not exist' % repo_name)
        self.extras = extras
        super(RepoVCSEvent, self).__init__(self.repo)

    @property
    def acting_user(self):
        """User performing the VCS action, resolved from the hook extras."""
        if self.extras.get('username'):
            # BUG FIX: was bare `extras` (NameError); local import keeps
            # module load free of a model/events circular dependency
            from rhodecode.model.db import User
            return User.get_by_username(self.extras['username'])

    @property
    def acting_ip(self):
        """IP address the VCS action originated from."""
        # BUG FIX: was `User.get_by_username(extras['ip'])` — both a
        # NameError (bare `extras`) and the wrong lookup (a user keyed by
        # an IP address); return the IP string itself.
        if self.extras.get('ip'):
            return self.extras['ip']
76 112
77 113 class RepoPrePullEvent(RepoVCSEvent):
78 114 """
79 115 An instance of this class is emitted as an :term:`event` before commits
80 116 are pulled from a repo.
81 117 """
82 118 name = 'repo-pre-pull'
83 119
84 120
85 121 class RepoPullEvent(RepoVCSEvent):
86 122 """
87 123 An instance of this class is emitted as an :term:`event` after commits
88 124 are pulled from a repo.
89 125 """
90 126 name = 'repo-pull'
91 127
92 128
93 129 class RepoPrePushEvent(RepoVCSEvent):
94 130 """
95 131 An instance of this class is emitted as an :term:`event` before commits
96 132 are pushed to a repo.
97 133 """
98 134 name = 'repo-pre-push'
99 135
100 136
101 137 class RepoPushEvent(RepoVCSEvent):
102 138 """
103 139 An instance of this class is emitted as an :term:`event` after commits
104 140 are pushed to a repo.
105 141
106 142 :param extras: (optional) dict of data from proxied VCS actions
107 143 """
108 144 name = 'repo-push'
109 145
110 146 def __init__(self, repo_name, pushed_commit_ids, extras):
111 147 super(RepoPushEvent, self).__init__(repo_name, extras)
112 148 self.pushed_commit_ids = pushed_commit_ids
113 149
@@ -1,54 +1,55 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from zope.interface import implementer
20 from rhodecode.events import RhodecodeEvent
20
21 from rhodecode.events.base import RhodecodeEvent
21 22 from rhodecode.events.interfaces import (
22 23 IUserRegistered, IUserPreCreate, IUserPreUpdate)
23 24
24 25
25 26 @implementer(IUserRegistered)
26 27 class UserRegistered(RhodecodeEvent):
27 28 """
28 29 An instance of this class is emitted as an :term:`event` whenever a user
29 30 account is registered.
30 31 """
31 32 def __init__(self, user, session):
32 33 self.user = user
33 34 self.session = session
34 35
35 36
36 37 @implementer(IUserPreCreate)
37 38 class UserPreCreate(RhodecodeEvent):
38 39 """
39 40 An instance of this class is emitted as an :term:`event` before a new user
40 41 object is created.
41 42 """
42 43 def __init__(self, user_data):
43 44 self.user_data = user_data
44 45
45 46
46 47 @implementer(IUserPreUpdate)
47 48 class UserPreUpdate(RhodecodeEvent):
48 49 """
49 50 An instance of this class is emitted as an :term:`event` before a user
50 51 object is updated.
51 52 """
52 53 def __init__(self, user, user_data):
53 54 self.user = user
54 55 self.user_data = user_data
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,1148 +1,1153 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30
31 31 from pylons.i18n.translation import _
32 32 from pylons.i18n.translation import lazy_ugettext
33 33
34 34 import rhodecode
35 35 from rhodecode.lib import helpers as h, hooks_utils, diffs
36 36 from rhodecode.lib.compat import OrderedDict
37 37 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
38 38 from rhodecode.lib.markup_renderer import (
39 39 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
40 40 from rhodecode.lib.utils import action_logger
41 41 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
42 42 from rhodecode.lib.vcs.backends.base import (
43 43 Reference, MergeResponse, MergeFailureReason)
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 CommitDoesNotExistError, EmptyRepositoryError)
46 46 from rhodecode.model import BaseModel
47 47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 48 from rhodecode.model.comment import ChangesetCommentsModel
49 49 from rhodecode.model.db import (
50 50 PullRequest, PullRequestReviewers, Notification, ChangesetStatus,
51 51 PullRequestVersion, ChangesetComment)
52 52 from rhodecode.model.meta import Session
53 53 from rhodecode.model.notification import NotificationModel, \
54 54 EmailNotificationModel
55 55 from rhodecode.model.scm import ScmModel
56 56 from rhodecode.model.settings import VcsSettingsModel
57 57
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61
62 62 class PullRequestModel(BaseModel):
63 63
64 64 cls = PullRequest
65 65
66 66 DIFF_CONTEXT = 3
67 67
68 68 MERGE_STATUS_MESSAGES = {
69 69 MergeFailureReason.NONE: lazy_ugettext(
70 70 'This pull request can be automatically merged.'),
71 71 MergeFailureReason.UNKNOWN: lazy_ugettext(
72 72 'This pull request cannot be merged because of an unhandled'
73 73 ' exception.'),
74 74 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
75 75 'This pull request cannot be merged because of conflicts.'),
76 76 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
77 77 'This pull request could not be merged because push to target'
78 78 ' failed.'),
79 79 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
80 80 'This pull request cannot be merged because the target is not a'
81 81 ' head.'),
82 82 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
83 83 'This pull request cannot be merged because the source contains'
84 84 ' more branches than the target.'),
85 85 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
86 86 'This pull request cannot be merged because the target has'
87 87 ' multiple heads.'),
88 88 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
89 89 'This pull request cannot be merged because the target repository'
90 90 ' is locked.'),
91 91 MergeFailureReason.MISSING_COMMIT: lazy_ugettext(
92 92 'This pull request cannot be merged because the target or the '
93 93 'source reference is missing.'),
94 94 }
95 95
96 96 def __get_pull_request(self, pull_request):
97 97 return self._get_instance(PullRequest, pull_request)
98 98
99 99 def _check_perms(self, perms, pull_request, user, api=False):
100 100 if not api:
101 101 return h.HasRepoPermissionAny(*perms)(
102 102 user=user, repo_name=pull_request.target_repo.repo_name)
103 103 else:
104 104 return h.HasRepoPermissionAnyApi(*perms)(
105 105 user=user, repo_name=pull_request.target_repo.repo_name)
106 106
107 107 def check_user_read(self, pull_request, user, api=False):
108 108 _perms = ('repository.admin', 'repository.write', 'repository.read',)
109 109 return self._check_perms(_perms, pull_request, user, api)
110 110
111 111 def check_user_merge(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_update(self, pull_request, user, api=False):
116 116 owner = user.user_id == pull_request.user_id
117 117 return self.check_user_merge(pull_request, user, api) or owner
118 118
119 119 def check_user_change_status(self, pull_request, user, api=False):
120 120 reviewer = user.user_id in [x.user_id for x in
121 121 pull_request.reviewers]
122 122 return self.check_user_update(pull_request, user, api) or reviewer
123 123
124 124 def get(self, pull_request):
125 125 return self.__get_pull_request(pull_request)
126 126
127 127 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
128 128 opened_by=None, order_by=None,
129 129 order_dir='desc'):
130 130 repo = self._get_repo(repo_name)
131 131 q = PullRequest.query()
132 132 # source or target
133 133 if source:
134 134 q = q.filter(PullRequest.source_repo == repo)
135 135 else:
136 136 q = q.filter(PullRequest.target_repo == repo)
137 137
138 138 # closed,opened
139 139 if statuses:
140 140 q = q.filter(PullRequest.status.in_(statuses))
141 141
142 142 # opened by filter
143 143 if opened_by:
144 144 q = q.filter(PullRequest.user_id.in_(opened_by))
145 145
146 146 if order_by:
147 147 order_map = {
148 148 'name_raw': PullRequest.pull_request_id,
149 149 'title': PullRequest.title,
150 150 'updated_on_raw': PullRequest.updated_on
151 151 }
152 152 if order_dir == 'asc':
153 153 q = q.order_by(order_map[order_by].asc())
154 154 else:
155 155 q = q.order_by(order_map[order_by].desc())
156 156
157 157 return q
158 158
159 159 def count_all(self, repo_name, source=False, statuses=None,
160 160 opened_by=None):
161 161 """
162 162 Count the number of pull requests for a specific repository.
163 163
164 164 :param repo_name: target or source repo
165 165 :param source: boolean flag to specify if repo_name refers to source
166 166 :param statuses: list of pull request statuses
167 167 :param opened_by: author user of the pull request
168 168 :returns: int number of pull requests
169 169 """
170 170 q = self._prepare_get_all_query(
171 171 repo_name, source=source, statuses=statuses, opened_by=opened_by)
172 172
173 173 return q.count()
174 174
175 175 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
176 176 offset=0, length=None, order_by=None, order_dir='desc'):
177 177 """
178 178 Get all pull requests for a specific repository.
179 179
180 180 :param repo_name: target or source repo
181 181 :param source: boolean flag to specify if repo_name refers to source
182 182 :param statuses: list of pull request statuses
183 183 :param opened_by: author user of the pull request
184 184 :param offset: pagination offset
185 185 :param length: length of returned list
186 186 :param order_by: order of the returned list
187 187 :param order_dir: 'asc' or 'desc' ordering direction
188 188 :returns: list of pull requests
189 189 """
190 190 q = self._prepare_get_all_query(
191 191 repo_name, source=source, statuses=statuses, opened_by=opened_by,
192 192 order_by=order_by, order_dir=order_dir)
193 193
194 194 if length:
195 195 pull_requests = q.limit(length).offset(offset).all()
196 196 else:
197 197 pull_requests = q.all()
198 198
199 199 return pull_requests
200 200
201 201 def count_awaiting_review(self, repo_name, source=False, statuses=None,
202 202 opened_by=None):
203 203 """
204 204 Count the number of pull requests for a specific repository that are
205 205 awaiting review.
206 206
207 207 :param repo_name: target or source repo
208 208 :param source: boolean flag to specify if repo_name refers to source
209 209 :param statuses: list of pull request statuses
210 210 :param opened_by: author user of the pull request
211 211 :returns: int number of pull requests
212 212 """
213 213 pull_requests = self.get_awaiting_review(
214 214 repo_name, source=source, statuses=statuses, opened_by=opened_by)
215 215
216 216 return len(pull_requests)
217 217
218 218 def get_awaiting_review(self, repo_name, source=False, statuses=None,
219 219 opened_by=None, offset=0, length=None,
220 220 order_by=None, order_dir='desc'):
221 221 """
222 222 Get all pull requests for a specific repository that are awaiting
223 223 review.
224 224
225 225 :param repo_name: target or source repo
226 226 :param source: boolean flag to specify if repo_name refers to source
227 227 :param statuses: list of pull request statuses
228 228 :param opened_by: author user of the pull request
229 229 :param offset: pagination offset
230 230 :param length: length of returned list
231 231 :param order_by: order of the returned list
232 232 :param order_dir: 'asc' or 'desc' ordering direction
233 233 :returns: list of pull requests
234 234 """
235 235 pull_requests = self.get_all(
236 236 repo_name, source=source, statuses=statuses, opened_by=opened_by,
237 237 order_by=order_by, order_dir=order_dir)
238 238
239 239 _filtered_pull_requests = []
240 240 for pr in pull_requests:
241 241 status = pr.calculated_review_status()
242 242 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
243 243 ChangesetStatus.STATUS_UNDER_REVIEW]:
244 244 _filtered_pull_requests.append(pr)
245 245 if length:
246 246 return _filtered_pull_requests[offset:offset+length]
247 247 else:
248 248 return _filtered_pull_requests
249 249
250 250 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
251 251 opened_by=None, user_id=None):
252 252 """
253 253 Count the number of pull requests for a specific repository that are
254 254 awaiting review from a specific user.
255 255
256 256 :param repo_name: target or source repo
257 257 :param source: boolean flag to specify if repo_name refers to source
258 258 :param statuses: list of pull request statuses
259 259 :param opened_by: author user of the pull request
260 260 :param user_id: reviewer user of the pull request
261 261 :returns: int number of pull requests
262 262 """
263 263 pull_requests = self.get_awaiting_my_review(
264 264 repo_name, source=source, statuses=statuses, opened_by=opened_by,
265 265 user_id=user_id)
266 266
267 267 return len(pull_requests)
268 268
269 269 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
270 270 opened_by=None, user_id=None, offset=0,
271 271 length=None, order_by=None, order_dir='desc'):
272 272 """
273 273 Get all pull requests for a specific repository that are awaiting
274 274 review from a specific user.
275 275
276 276 :param repo_name: target or source repo
277 277 :param source: boolean flag to specify if repo_name refers to source
278 278 :param statuses: list of pull request statuses
279 279 :param opened_by: author user of the pull request
280 280 :param user_id: reviewer user of the pull request
281 281 :param offset: pagination offset
282 282 :param length: length of returned list
283 283 :param order_by: order of the returned list
284 284 :param order_dir: 'asc' or 'desc' ordering direction
285 285 :returns: list of pull requests
286 286 """
287 287 pull_requests = self.get_all(
288 288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 289 order_by=order_by, order_dir=order_dir)
290 290
291 291 _my = PullRequestModel().get_not_reviewed(user_id)
292 292 my_participation = []
293 293 for pr in pull_requests:
294 294 if pr in _my:
295 295 my_participation.append(pr)
296 296 _filtered_pull_requests = my_participation
297 297 if length:
298 298 return _filtered_pull_requests[offset:offset+length]
299 299 else:
300 300 return _filtered_pull_requests
301 301
302 302 def get_not_reviewed(self, user_id):
303 303 return [
304 304 x.pull_request for x in PullRequestReviewers.query().filter(
305 305 PullRequestReviewers.user_id == user_id).all()
306 306 ]
307 307
308 308 def get_versions(self, pull_request):
309 309 """
310 310 returns version of pull request sorted by ID descending
311 311 """
312 312 return PullRequestVersion.query()\
313 313 .filter(PullRequestVersion.pull_request == pull_request)\
314 314 .order_by(PullRequestVersion.pull_request_version_id.asc())\
315 315 .all()
316 316
317 317 def create(self, created_by, source_repo, source_ref, target_repo,
318 318 target_ref, revisions, reviewers, title, description=None):
319 319 created_by_user = self._get_user(created_by)
320 320 source_repo = self._get_repo(source_repo)
321 321 target_repo = self._get_repo(target_repo)
322 322
323 323 pull_request = PullRequest()
324 324 pull_request.source_repo = source_repo
325 325 pull_request.source_ref = source_ref
326 326 pull_request.target_repo = target_repo
327 327 pull_request.target_ref = target_ref
328 328 pull_request.revisions = revisions
329 329 pull_request.title = title
330 330 pull_request.description = description
331 331 pull_request.author = created_by_user
332 332
333 333 Session().add(pull_request)
334 334 Session().flush()
335 335
336 336 # members / reviewers
337 337 for user_id in set(reviewers):
338 338 user = self._get_user(user_id)
339 339 reviewer = PullRequestReviewers(user, pull_request)
340 340 Session().add(reviewer)
341 341
342 342 # Set approval status to "Under Review" for all commits which are
343 343 # part of this pull request.
344 344 ChangesetStatusModel().set_status(
345 345 repo=target_repo,
346 346 status=ChangesetStatus.STATUS_UNDER_REVIEW,
347 347 user=created_by_user,
348 348 pull_request=pull_request
349 349 )
350 350
351 351 self.notify_reviewers(pull_request, reviewers)
352 352 self._trigger_pull_request_hook(
353 353 pull_request, created_by_user, 'create')
354 354
355 355 return pull_request
356 356
357 357 def _trigger_pull_request_hook(self, pull_request, user, action):
358 358 pull_request = self.__get_pull_request(pull_request)
359 359 target_scm = pull_request.target_repo.scm_instance()
360 360 if action == 'create':
361 361 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
362 362 elif action == 'merge':
363 363 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
364 364 elif action == 'close':
365 365 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
366 366 elif action == 'review_status_change':
367 367 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
368 368 elif action == 'update':
369 369 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
370 370 else:
371 371 return
372 372
373 373 trigger_hook(
374 374 username=user.username,
375 375 repo_name=pull_request.target_repo.repo_name,
376 376 repo_alias=target_scm.alias,
377 377 pull_request=pull_request)
378 378
379 379 def _get_commit_ids(self, pull_request):
380 380 """
381 381 Return the commit ids of the merged pull request.
382 382
383 383 This method is not dealing correctly yet with the lack of autoupdates
384 384 nor with the implicit target updates.
385 385 For example: if a commit in the source repo is already in the target it
386 386 will be reported anyways.
387 387 """
388 388 merge_rev = pull_request.merge_rev
389 389 if merge_rev is None:
390 390 raise ValueError('This pull request was not merged yet')
391 391
392 392 commit_ids = list(pull_request.revisions)
393 393 if merge_rev not in commit_ids:
394 394 commit_ids.append(merge_rev)
395 395
396 396 return commit_ids
397 397
398 398 def merge(self, pull_request, user, extras):
399 399 log.debug("Merging pull request %s", pull_request.pull_request_id)
400 400 merge_state = self._merge_pull_request(pull_request, user, extras)
401 401 if merge_state.executed:
402 402 log.debug(
403 403 "Merge was successful, updating the pull request comments.")
404 404 self._comment_and_close_pr(pull_request, user, merge_state)
405 405 self._log_action('user_merged_pull_request', user, pull_request)
406 406 else:
407 407 log.warn("Merge failed, not updating the pull request.")
408 408 return merge_state
409 409
410 410 def _merge_pull_request(self, pull_request, user, extras):
411 411 target_vcs = pull_request.target_repo.scm_instance()
412 412 source_vcs = pull_request.source_repo.scm_instance()
413 413 target_ref = self._refresh_reference(
414 414 pull_request.target_ref_parts, target_vcs)
415 415
416 416 message = _(
417 417 'Merge pull request #%(pr_id)s from '
418 418 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
419 419 'pr_id': pull_request.pull_request_id,
420 420 'source_repo': source_vcs.name,
421 421 'source_ref_name': pull_request.source_ref_parts.name,
422 422 'pr_title': pull_request.title
423 423 }
424 424
425 425 workspace_id = self._workspace_id(pull_request)
426 426 protocol = rhodecode.CONFIG.get('vcs.hooks.protocol')
427 427 use_direct_calls = rhodecode.CONFIG.get('vcs.hooks.direct_calls')
428 428 use_rebase = self._use_rebase_for_merging(pull_request)
429 429
430 430 callback_daemon, extras = prepare_callback_daemon(
431 431 extras, protocol=protocol, use_direct_calls=use_direct_calls)
432 432
433 433 with callback_daemon:
434 434 # TODO: johbo: Implement a clean way to run a config_override
435 435 # for a single call.
436 436 target_vcs.config.set(
437 437 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
438 438 merge_state = target_vcs.merge(
439 439 target_ref, source_vcs, pull_request.source_ref_parts,
440 440 workspace_id, user_name=user.username,
441 441 user_email=user.email, message=message, use_rebase=use_rebase)
442 442 return merge_state
443 443
444 444 def _comment_and_close_pr(self, pull_request, user, merge_state):
445 445 pull_request.merge_rev = merge_state.merge_commit_id
446 446 pull_request.updated_on = datetime.datetime.now()
447 447
448 448 ChangesetCommentsModel().create(
449 449 text=unicode(_('Pull request merged and closed')),
450 450 repo=pull_request.target_repo.repo_id,
451 451 user=user.user_id,
452 452 pull_request=pull_request.pull_request_id,
453 453 f_path=None,
454 454 line_no=None,
455 455 closing_pr=True
456 456 )
457 457
458 458 Session().add(pull_request)
459 459 Session().flush()
460 460 # TODO: paris: replace invalidation with less radical solution
461 461 ScmModel().mark_for_invalidation(
462 462 pull_request.target_repo.repo_name)
463 463 self._trigger_pull_request_hook(pull_request, user, 'merge')
464 464
465 465 def has_valid_update_type(self, pull_request):
466 466 source_ref_type = pull_request.source_ref_parts.type
467 467 return source_ref_type in ['book', 'branch', 'tag']
468 468
469 469 def update_commits(self, pull_request):
470 470 """
471 471 Get the updated list of commits for the pull request
472 472 and return the new pull request version and the list
473 473 of commits processed by this update action
474 474 """
475 475
476 476 pull_request = self.__get_pull_request(pull_request)
477 477 source_ref_type = pull_request.source_ref_parts.type
478 478 source_ref_name = pull_request.source_ref_parts.name
479 479 source_ref_id = pull_request.source_ref_parts.commit_id
480 480
481 481 if not self.has_valid_update_type(pull_request):
482 482 log.debug(
483 483 "Skipping update of pull request %s due to ref type: %s",
484 484 pull_request, source_ref_type)
485 485 return (None, None)
486 486
487 487 source_repo = pull_request.source_repo.scm_instance()
488 488 source_commit = source_repo.get_commit(commit_id=source_ref_name)
489 489 if source_ref_id == source_commit.raw_id:
490 490 log.debug("Nothing changed in pull request %s", pull_request)
491 491 return (None, None)
492 492
493 493 # Finally there is a need for an update
494 494 pull_request_version = self._create_version_from_snapshot(pull_request)
495 495 self._link_comments_to_version(pull_request_version)
496 496
497 497 target_ref_type = pull_request.target_ref_parts.type
498 498 target_ref_name = pull_request.target_ref_parts.name
499 499 target_ref_id = pull_request.target_ref_parts.commit_id
500 500 target_repo = pull_request.target_repo.scm_instance()
501 501
502 502 if target_ref_type in ('tag', 'branch', 'book'):
503 503 target_commit = target_repo.get_commit(target_ref_name)
504 504 else:
505 505 target_commit = target_repo.get_commit(target_ref_id)
506 506
507 507 # re-compute commit ids
508 508 old_commit_ids = set(pull_request.revisions)
509 509 pre_load = ["author", "branch", "date", "message"]
510 510 commit_ranges = target_repo.compare(
511 511 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
512 512 pre_load=pre_load)
513 513
514 514 ancestor = target_repo.get_common_ancestor(
515 515 target_commit.raw_id, source_commit.raw_id, source_repo)
516 516
517 517 pull_request.source_ref = '%s:%s:%s' % (
518 518 source_ref_type, source_ref_name, source_commit.raw_id)
519 519 pull_request.target_ref = '%s:%s:%s' % (
520 520 target_ref_type, target_ref_name, ancestor)
521 521 pull_request.revisions = [
522 522 commit.raw_id for commit in reversed(commit_ranges)]
523 523 pull_request.updated_on = datetime.datetime.now()
524 524 Session().add(pull_request)
525 525 new_commit_ids = set(pull_request.revisions)
526 526
527 527 changes = self._calculate_commit_id_changes(
528 528 old_commit_ids, new_commit_ids)
529 529
530 530 old_diff_data, new_diff_data = self._generate_update_diffs(
531 531 pull_request, pull_request_version)
532 532
533 533 ChangesetCommentsModel().outdate_comments(
534 534 pull_request, old_diff_data=old_diff_data,
535 535 new_diff_data=new_diff_data)
536 536
537 537 file_changes = self._calculate_file_changes(
538 538 old_diff_data, new_diff_data)
539 539
540 540 # Add an automatic comment to the pull request
541 541 update_comment = ChangesetCommentsModel().create(
542 542 text=self._render_update_message(changes, file_changes),
543 543 repo=pull_request.target_repo,
544 544 user=pull_request.author,
545 545 pull_request=pull_request,
546 546 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
547 547
548 548 # Update status to "Under Review" for added commits
549 549 for commit_id in changes.added:
550 550 ChangesetStatusModel().set_status(
551 551 repo=pull_request.source_repo,
552 552 status=ChangesetStatus.STATUS_UNDER_REVIEW,
553 553 comment=update_comment,
554 554 user=pull_request.author,
555 555 pull_request=pull_request,
556 556 revision=commit_id)
557 557
558 558 log.debug(
559 559 'Updated pull request %s, added_ids: %s, common_ids: %s, '
560 560 'removed_ids: %s', pull_request.pull_request_id,
561 561 changes.added, changes.common, changes.removed)
562 562 log.debug('Updated pull request with the following file changes: %s',
563 563 file_changes)
564 564
565 565 log.info(
566 566 "Updated pull request %s from commit %s to commit %s, "
567 567 "stored new version %s of this pull request.",
568 568 pull_request.pull_request_id, source_ref_id,
569 569 pull_request.source_ref_parts.commit_id,
570 570 pull_request_version.pull_request_version_id)
571 571 Session().commit()
572 572 self._trigger_pull_request_hook(pull_request, pull_request.author,
573 573 'update')
574 574 return (pull_request_version, changes)
575 575
576 576 def _create_version_from_snapshot(self, pull_request):
577 577 version = PullRequestVersion()
578 578 version.title = pull_request.title
579 579 version.description = pull_request.description
580 580 version.status = pull_request.status
581 581 version.created_on = pull_request.created_on
582 582 version.updated_on = pull_request.updated_on
583 583 version.user_id = pull_request.user_id
584 584 version.source_repo = pull_request.source_repo
585 585 version.source_ref = pull_request.source_ref
586 586 version.target_repo = pull_request.target_repo
587 587 version.target_ref = pull_request.target_ref
588 588
589 589 version._last_merge_source_rev = pull_request._last_merge_source_rev
590 590 version._last_merge_target_rev = pull_request._last_merge_target_rev
591 591 version._last_merge_status = pull_request._last_merge_status
592 592 version.merge_rev = pull_request.merge_rev
593 593
594 594 version.revisions = pull_request.revisions
595 595 version.pull_request = pull_request
596 596 Session().add(version)
597 597 Session().flush()
598 598
599 599 return version
600 600
601 601 def _generate_update_diffs(self, pull_request, pull_request_version):
602 602 diff_context = (
603 603 self.DIFF_CONTEXT +
604 604 ChangesetCommentsModel.needed_extra_diff_context())
605 605 old_diff = self._get_diff_from_pr_or_version(
606 606 pull_request_version, context=diff_context)
607 607 new_diff = self._get_diff_from_pr_or_version(
608 608 pull_request, context=diff_context)
609 609
610 610 old_diff_data = diffs.DiffProcessor(old_diff)
611 611 old_diff_data.prepare()
612 612 new_diff_data = diffs.DiffProcessor(new_diff)
613 613 new_diff_data.prepare()
614 614
615 615 return old_diff_data, new_diff_data
616 616
617 617 def _link_comments_to_version(self, pull_request_version):
618 618 """
619 619 Link all unlinked comments of this pull request to the given version.
620 620
621 621 :param pull_request_version: The `PullRequestVersion` to which
622 622 the comments shall be linked.
623 623
624 624 """
625 625 pull_request = pull_request_version.pull_request
626 626 comments = ChangesetComment.query().filter(
627 627 # TODO: johbo: Should we query for the repo at all here?
628 628 # Pending decision on how comments of PRs are to be related
629 629 # to either the source repo, the target repo or no repo at all.
630 630 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
631 631 ChangesetComment.pull_request == pull_request,
632 632 ChangesetComment.pull_request_version == None)
633 633
634 634 # TODO: johbo: Find out why this breaks if it is done in a bulk
635 635 # operation.
636 636 for comment in comments:
637 637 comment.pull_request_version_id = (
638 638 pull_request_version.pull_request_version_id)
639 639 Session().add(comment)
640 640
641 641 def _calculate_commit_id_changes(self, old_ids, new_ids):
642 642 added = new_ids.difference(old_ids)
643 643 common = old_ids.intersection(new_ids)
644 644 removed = old_ids.difference(new_ids)
645 645 return ChangeTuple(added, common, removed)
646 646
647 647 def _calculate_file_changes(self, old_diff_data, new_diff_data):
648 648
649 649 old_files = OrderedDict()
650 650 for diff_data in old_diff_data.parsed_diff:
651 651 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
652 652
653 653 added_files = []
654 654 modified_files = []
655 655 removed_files = []
656 656 for diff_data in new_diff_data.parsed_diff:
657 657 new_filename = diff_data['filename']
658 658 new_hash = md5_safe(diff_data['raw_diff'])
659 659
660 660 old_hash = old_files.get(new_filename)
661 661 if not old_hash:
662 662 # file is not present in old diff, means it's added
663 663 added_files.append(new_filename)
664 664 else:
665 665 if new_hash != old_hash:
666 666 modified_files.append(new_filename)
667 667 # now remove a file from old, since we have seen it already
668 668 del old_files[new_filename]
669 669
670 670 # removed files is when there are present in old, but not in NEW,
671 671 # since we remove old files that are present in new diff, left-overs
672 672 # if any should be the removed files
673 673 removed_files.extend(old_files.keys())
674 674
675 675 return FileChangeTuple(added_files, modified_files, removed_files)
676 676
677 677 def _render_update_message(self, changes, file_changes):
678 678 """
679 679 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
680 680 so it's always looking the same disregarding on which default
681 681 renderer system is using.
682 682
683 683 :param changes: changes named tuple
684 684 :param file_changes: file changes named tuple
685 685
686 686 """
687 687 new_status = ChangesetStatus.get_status_lbl(
688 688 ChangesetStatus.STATUS_UNDER_REVIEW)
689 689
690 690 changed_files = (
691 691 file_changes.added + file_changes.modified + file_changes.removed)
692 692
693 693 params = {
694 694 'under_review_label': new_status,
695 695 'added_commits': changes.added,
696 696 'removed_commits': changes.removed,
697 697 'changed_files': changed_files,
698 698 'added_files': file_changes.added,
699 699 'modified_files': file_changes.modified,
700 700 'removed_files': file_changes.removed,
701 701 }
702 702 renderer = RstTemplateRenderer()
703 703 return renderer.render('pull_request_update.mako', **params)
704 704
705 705 def edit(self, pull_request, title, description):
706 706 pull_request = self.__get_pull_request(pull_request)
707 707 if pull_request.is_closed():
708 708 raise ValueError('This pull request is closed')
709 709 if title:
710 710 pull_request.title = title
711 711 pull_request.description = description
712 712 pull_request.updated_on = datetime.datetime.now()
713 713 Session().add(pull_request)
714 714
715 715 def update_reviewers(self, pull_request, reviewers_ids):
716 716 reviewers_ids = set(reviewers_ids)
717 717 pull_request = self.__get_pull_request(pull_request)
718 718 current_reviewers = PullRequestReviewers.query()\
719 719 .filter(PullRequestReviewers.pull_request ==
720 720 pull_request).all()
721 721 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
722 722
723 723 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
724 724 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
725 725
726 726 log.debug("Adding %s reviewers", ids_to_add)
727 727 log.debug("Removing %s reviewers", ids_to_remove)
728 728 changed = False
729 729 for uid in ids_to_add:
730 730 changed = True
731 731 _usr = self._get_user(uid)
732 732 reviewer = PullRequestReviewers(_usr, pull_request)
733 733 Session().add(reviewer)
734 734
735 735 self.notify_reviewers(pull_request, ids_to_add)
736 736
737 737 for uid in ids_to_remove:
738 738 changed = True
739 739 reviewer = PullRequestReviewers.query()\
740 740 .filter(PullRequestReviewers.user_id == uid,
741 741 PullRequestReviewers.pull_request == pull_request)\
742 742 .scalar()
743 743 if reviewer:
744 744 Session().delete(reviewer)
745 745 if changed:
746 746 pull_request.updated_on = datetime.datetime.now()
747 747 Session().add(pull_request)
748 748
749 749 return ids_to_add, ids_to_remove
750 750
751 def get_url(self, pull_request):
752 return url('pullrequest_show', repo_name=self.target_repo.repo_name,
753 pull_request_id=self.pull_request_id,
754 qualified=True)
755
751 756 def notify_reviewers(self, pull_request, reviewers_ids):
752 757 # notification to reviewers
753 758 if not reviewers_ids:
754 759 return
755 760
756 761 pull_request_obj = pull_request
757 762 # get the current participants of this pull request
758 763 recipients = reviewers_ids
759 764 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
760 765
761 766 pr_source_repo = pull_request_obj.source_repo
762 767 pr_target_repo = pull_request_obj.target_repo
763 768
764 769 pr_url = h.url(
765 770 'pullrequest_show',
766 771 repo_name=pr_target_repo.repo_name,
767 772 pull_request_id=pull_request_obj.pull_request_id,
768 773 qualified=True,)
769 774
770 775 # set some variables for email notification
771 776 pr_target_repo_url = h.url(
772 777 'summary_home',
773 778 repo_name=pr_target_repo.repo_name,
774 779 qualified=True)
775 780
776 781 pr_source_repo_url = h.url(
777 782 'summary_home',
778 783 repo_name=pr_source_repo.repo_name,
779 784 qualified=True)
780 785
781 786 # pull request specifics
782 787 pull_request_commits = [
783 788 (x.raw_id, x.message)
784 789 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
785 790
786 791 kwargs = {
787 792 'user': pull_request.author,
788 793 'pull_request': pull_request_obj,
789 794 'pull_request_commits': pull_request_commits,
790 795
791 796 'pull_request_target_repo': pr_target_repo,
792 797 'pull_request_target_repo_url': pr_target_repo_url,
793 798
794 799 'pull_request_source_repo': pr_source_repo,
795 800 'pull_request_source_repo_url': pr_source_repo_url,
796 801
797 802 'pull_request_url': pr_url,
798 803 }
799 804
800 805 # pre-generate the subject for notification itself
801 806 (subject,
802 807 _h, _e, # we don't care about those
803 808 body_plaintext) = EmailNotificationModel().render_email(
804 809 notification_type, **kwargs)
805 810
806 811 # create notification objects, and emails
807 812 NotificationModel().create(
808 813 created_by=pull_request.author,
809 814 notification_subject=subject,
810 815 notification_body=body_plaintext,
811 816 notification_type=notification_type,
812 817 recipients=recipients,
813 818 email_kwargs=kwargs,
814 819 )
815 820
816 821 def delete(self, pull_request):
817 822 pull_request = self.__get_pull_request(pull_request)
818 823 self._cleanup_merge_workspace(pull_request)
819 824 Session().delete(pull_request)
820 825
821 826 def close_pull_request(self, pull_request, user):
822 827 pull_request = self.__get_pull_request(pull_request)
823 828 self._cleanup_merge_workspace(pull_request)
824 829 pull_request.status = PullRequest.STATUS_CLOSED
825 830 pull_request.updated_on = datetime.datetime.now()
826 831 Session().add(pull_request)
827 832 self._trigger_pull_request_hook(
828 833 pull_request, pull_request.author, 'close')
829 834 self._log_action('user_closed_pull_request', user, pull_request)
830 835
831 836 def close_pull_request_with_comment(self, pull_request, user, repo,
832 837 message=None):
833 838 status = ChangesetStatus.STATUS_REJECTED
834 839
835 840 if not message:
836 841 message = (
837 842 _('Status change %(transition_icon)s %(status)s') % {
838 843 'transition_icon': '>',
839 844 'status': ChangesetStatus.get_status_lbl(status)})
840 845
841 846 internal_message = _('Closing with') + ' ' + message
842 847
843 848 comm = ChangesetCommentsModel().create(
844 849 text=internal_message,
845 850 repo=repo.repo_id,
846 851 user=user.user_id,
847 852 pull_request=pull_request.pull_request_id,
848 853 f_path=None,
849 854 line_no=None,
850 855 status_change=ChangesetStatus.get_status_lbl(status),
851 856 closing_pr=True
852 857 )
853 858
854 859 ChangesetStatusModel().set_status(
855 860 repo.repo_id,
856 861 status,
857 862 user.user_id,
858 863 comm,
859 864 pull_request=pull_request.pull_request_id
860 865 )
861 866 Session().flush()
862 867
863 868 PullRequestModel().close_pull_request(
864 869 pull_request.pull_request_id, user)
865 870
866 871 def merge_status(self, pull_request):
867 872 if not self._is_merge_enabled(pull_request):
868 873 return False, _('Server-side pull request merging is disabled.')
869 874 if pull_request.is_closed():
870 875 return False, _('This pull request is closed.')
871 876 merge_possible, msg = self._check_repo_requirements(
872 877 target=pull_request.target_repo, source=pull_request.source_repo)
873 878 if not merge_possible:
874 879 return merge_possible, msg
875 880
876 881 try:
877 882 resp = self._try_merge(pull_request)
878 883 status = resp.possible, self.merge_status_message(
879 884 resp.failure_reason)
880 885 except NotImplementedError:
881 886 status = False, _('Pull request merging is not supported.')
882 887
883 888 return status
884 889
885 890 def _check_repo_requirements(self, target, source):
886 891 """
887 892 Check if `target` and `source` have compatible requirements.
888 893
889 894 Currently this is just checking for largefiles.
890 895 """
891 896 target_has_largefiles = self._has_largefiles(target)
892 897 source_has_largefiles = self._has_largefiles(source)
893 898 merge_possible = True
894 899 message = u''
895 900
896 901 if target_has_largefiles != source_has_largefiles:
897 902 merge_possible = False
898 903 if source_has_largefiles:
899 904 message = _(
900 905 'Target repository large files support is disabled.')
901 906 else:
902 907 message = _(
903 908 'Source repository large files support is disabled.')
904 909
905 910 return merge_possible, message
906 911
907 912 def _has_largefiles(self, repo):
908 913 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
909 914 'extensions', 'largefiles')
910 915 return largefiles_ui and largefiles_ui[0].active
911 916
912 917 def _try_merge(self, pull_request):
913 918 """
914 919 Try to merge the pull request and return the merge status.
915 920 """
916 921 log.debug(
917 922 "Trying out if the pull request %s can be merged.",
918 923 pull_request.pull_request_id)
919 924 target_vcs = pull_request.target_repo.scm_instance()
920 925 target_ref = self._refresh_reference(
921 926 pull_request.target_ref_parts, target_vcs)
922 927
923 928 target_locked = pull_request.target_repo.locked
924 929 if target_locked and target_locked[0]:
925 930 log.debug("The target repository is locked.")
926 931 merge_state = MergeResponse(
927 932 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
928 933 elif self._needs_merge_state_refresh(pull_request, target_ref):
929 934 log.debug("Refreshing the merge status of the repository.")
930 935 merge_state = self._refresh_merge_state(
931 936 pull_request, target_vcs, target_ref)
932 937 else:
933 938 possible = pull_request.\
934 939 _last_merge_status == MergeFailureReason.NONE
935 940 merge_state = MergeResponse(
936 941 possible, False, None, pull_request._last_merge_status)
937 942 log.debug("Merge response: %s", merge_state)
938 943 return merge_state
939 944
940 945 def _refresh_reference(self, reference, vcs_repository):
941 946 if reference.type in ('branch', 'book'):
942 947 name_or_id = reference.name
943 948 else:
944 949 name_or_id = reference.commit_id
945 950 refreshed_commit = vcs_repository.get_commit(name_or_id)
946 951 refreshed_reference = Reference(
947 952 reference.type, reference.name, refreshed_commit.raw_id)
948 953 return refreshed_reference
949 954
950 955 def _needs_merge_state_refresh(self, pull_request, target_reference):
951 956 return not(
952 957 pull_request.revisions and
953 958 pull_request.revisions[0] == pull_request._last_merge_source_rev and
954 959 target_reference.commit_id == pull_request._last_merge_target_rev)
955 960
956 961 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
957 962 workspace_id = self._workspace_id(pull_request)
958 963 source_vcs = pull_request.source_repo.scm_instance()
959 964 use_rebase = self._use_rebase_for_merging(pull_request)
960 965 merge_state = target_vcs.merge(
961 966 target_reference, source_vcs, pull_request.source_ref_parts,
962 967 workspace_id, dry_run=True, use_rebase=use_rebase)
963 968
964 969 # Do not store the response if there was an unknown error.
965 970 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
966 971 pull_request._last_merge_source_rev = pull_request.\
967 972 source_ref_parts.commit_id
968 973 pull_request._last_merge_target_rev = target_reference.commit_id
969 974 pull_request._last_merge_status = (
970 975 merge_state.failure_reason)
971 976 Session().add(pull_request)
972 977 Session().flush()
973 978
974 979 return merge_state
975 980
976 981 def _workspace_id(self, pull_request):
977 982 workspace_id = 'pr-%s' % pull_request.pull_request_id
978 983 return workspace_id
979 984
980 985 def merge_status_message(self, status_code):
981 986 """
982 987 Return a human friendly error message for the given merge status code.
983 988 """
984 989 return self.MERGE_STATUS_MESSAGES[status_code]
985 990
986 991 def generate_repo_data(self, repo, commit_id=None, branch=None,
987 992 bookmark=None):
988 993 all_refs, selected_ref = \
989 994 self._get_repo_pullrequest_sources(
990 995 repo.scm_instance(), commit_id=commit_id,
991 996 branch=branch, bookmark=bookmark)
992 997
993 998 refs_select2 = []
994 999 for element in all_refs:
995 1000 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
996 1001 refs_select2.append({'text': element[1], 'children': children})
997 1002
998 1003 return {
999 1004 'user': {
1000 1005 'user_id': repo.user.user_id,
1001 1006 'username': repo.user.username,
1002 1007 'firstname': repo.user.firstname,
1003 1008 'lastname': repo.user.lastname,
1004 1009 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1005 1010 },
1006 1011 'description': h.chop_at_smart(repo.description, '\n'),
1007 1012 'refs': {
1008 1013 'all_refs': all_refs,
1009 1014 'selected_ref': selected_ref,
1010 1015 'select2_refs': refs_select2
1011 1016 }
1012 1017 }
1013 1018
1014 1019 def generate_pullrequest_title(self, source, source_ref, target):
1015 1020 return '{source}#{at_ref} to {target}'.format(
1016 1021 source=source,
1017 1022 at_ref=source_ref,
1018 1023 target=target,
1019 1024 )
1020 1025
1021 1026 def _cleanup_merge_workspace(self, pull_request):
1022 1027 # Merging related cleanup
1023 1028 target_scm = pull_request.target_repo.scm_instance()
1024 1029 workspace_id = 'pr-%s' % pull_request.pull_request_id
1025 1030
1026 1031 try:
1027 1032 target_scm.cleanup_merge_workspace(workspace_id)
1028 1033 except NotImplementedError:
1029 1034 pass
1030 1035
1031 1036 def _get_repo_pullrequest_sources(
1032 1037 self, repo, commit_id=None, branch=None, bookmark=None):
1033 1038 """
1034 1039 Return a structure with repo's interesting commits, suitable for
1035 1040 the selectors in pullrequest controller
1036 1041
1037 1042 :param commit_id: a commit that must be in the list somehow
1038 1043 and selected by default
1039 1044 :param branch: a branch that must be in the list and selected
1040 1045 by default - even if closed
1041 1046 :param bookmark: a bookmark that must be in the list and selected
1042 1047 """
1043 1048
1044 1049 commit_id = safe_str(commit_id) if commit_id else None
1045 1050 branch = safe_str(branch) if branch else None
1046 1051 bookmark = safe_str(bookmark) if bookmark else None
1047 1052
1048 1053 selected = None
1049 1054
1050 1055 # order matters: first source that has commit_id in it will be selected
1051 1056 sources = []
1052 1057 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1053 1058 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1054 1059
1055 1060 if commit_id:
1056 1061 ref_commit = (h.short_id(commit_id), commit_id)
1057 1062 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1058 1063
1059 1064 sources.append(
1060 1065 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1061 1066 )
1062 1067
1063 1068 groups = []
1064 1069 for group_key, ref_list, group_name, match in sources:
1065 1070 group_refs = []
1066 1071 for ref_name, ref_id in ref_list:
1067 1072 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1068 1073 group_refs.append((ref_key, ref_name))
1069 1074
1070 1075 if not selected:
1071 1076 if set([commit_id, match]) & set([ref_id, ref_name]):
1072 1077 selected = ref_key
1073 1078
1074 1079 if group_refs:
1075 1080 groups.append((group_refs, group_name))
1076 1081
1077 1082 if not selected:
1078 1083 ref = commit_id or branch or bookmark
1079 1084 if ref:
1080 1085 raise CommitDoesNotExistError(
1081 1086 'No commit refs could be found matching: %s' % ref)
1082 1087 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1083 1088 selected = 'branch:%s:%s' % (
1084 1089 repo.DEFAULT_BRANCH_NAME,
1085 1090 repo.branches[repo.DEFAULT_BRANCH_NAME]
1086 1091 )
1087 1092 elif repo.commit_ids:
1088 1093 rev = repo.commit_ids[0]
1089 1094 selected = 'rev:%s:%s' % (rev, rev)
1090 1095 else:
1091 1096 raise EmptyRepositoryError()
1092 1097 return groups, selected
1093 1098
1094 1099 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1095 1100 pull_request = self.__get_pull_request(pull_request)
1096 1101 return self._get_diff_from_pr_or_version(pull_request, context=context)
1097 1102
1098 1103 def _get_diff_from_pr_or_version(self, pr_or_version, context):
1099 1104 source_repo = pr_or_version.source_repo
1100 1105
1101 1106 # we swap org/other ref since we run a simple diff on one repo
1102 1107 target_ref_id = pr_or_version.target_ref_parts.commit_id
1103 1108 source_ref_id = pr_or_version.source_ref_parts.commit_id
1104 1109 target_commit = source_repo.get_commit(
1105 1110 commit_id=safe_str(target_ref_id))
1106 1111 source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
1107 1112 vcs_repo = source_repo.scm_instance()
1108 1113
1109 1114 # TODO: johbo: In the context of an update, we cannot reach
1110 1115 # the old commit anymore with our normal mechanisms. It needs
1111 1116 # some sort of special support in the vcs layer to avoid this
1112 1117 # workaround.
1113 1118 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1114 1119 vcs_repo.alias == 'git'):
1115 1120 source_commit.raw_id = safe_str(source_ref_id)
1116 1121
1117 1122 log.debug('calculating diff between '
1118 1123 'source_ref:%s and target_ref:%s for repo `%s`',
1119 1124 target_ref_id, source_ref_id,
1120 1125 safe_unicode(vcs_repo.path))
1121 1126
1122 1127 vcs_diff = vcs_repo.get_diff(
1123 1128 commit1=target_commit, commit2=source_commit, context=context)
1124 1129 return vcs_diff
1125 1130
1126 1131 def _is_merge_enabled(self, pull_request):
1127 1132 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1128 1133 settings = settings_model.get_general_settings()
1129 1134 return settings.get('rhodecode_pr_merge_enabled', False)
1130 1135
1131 1136 def _use_rebase_for_merging(self, pull_request):
1132 1137 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1133 1138 settings = settings_model.get_general_settings()
1134 1139 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1135 1140
1136 1141 def _log_action(self, action, user, pull_request):
1137 1142 action_logger(
1138 1143 user,
1139 1144 '{action}:{pr_id}'.format(
1140 1145 action=action, pr_id=pull_request.pull_request_id),
1141 1146 pull_request.target_repo)
1142 1147
1143 1148
1144 1149 ChangeTuple = namedtuple('ChangeTuple',
1145 1150 ['added', 'common', 'removed'])
1146 1151
1147 1152 FileChangeTuple = namedtuple('FileChangeTuple',
1148 1153 ['added', 'modified', 'removed'])
@@ -1,931 +1,934 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Repository model for rhodecode
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28 import shutil
29 29 import time
30 30 import traceback
31 31 from datetime import datetime
32 32
33 33 from sqlalchemy.sql import func
34 34 from sqlalchemy.sql.expression import true, or_
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from rhodecode import events
38 38 from rhodecode.lib import helpers as h
39 39 from rhodecode.lib.auth import HasUserGroupPermissionAny
40 40 from rhodecode.lib.caching_query import FromCache
41 41 from rhodecode.lib.exceptions import AttachedForksError
42 42 from rhodecode.lib.hooks_base import log_delete_repository
43 43 from rhodecode.lib.utils import make_db_config
44 44 from rhodecode.lib.utils2 import (
45 45 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
46 46 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
47 47 from rhodecode.lib.vcs.backends import get_backend
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
51 51 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
52 52 RepoGroup, RepositoryField)
53 53 from rhodecode.model.scm import UserGroupList
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 class RepoModel(BaseModel):
61 61
62 62 cls = Repository
63 63
64 64 def _get_user_group(self, users_group):
65 65 return self._get_instance(UserGroup, users_group,
66 66 callback=UserGroup.get_by_group_name)
67 67
68 68 def _get_repo_group(self, repo_group):
69 69 return self._get_instance(RepoGroup, repo_group,
70 70 callback=RepoGroup.get_by_group_name)
71 71
72 72 def _create_default_perms(self, repository, private):
73 73 # create default permission
74 74 default = 'repository.read'
75 75 def_user = User.get_default_user()
76 76 for p in def_user.user_perms:
77 77 if p.permission.permission_name.startswith('repository.'):
78 78 default = p.permission.permission_name
79 79 break
80 80
81 81 default_perm = 'repository.none' if private else default
82 82
83 83 repo_to_perm = UserRepoToPerm()
84 84 repo_to_perm.permission = Permission.get_by_key(default_perm)
85 85
86 86 repo_to_perm.repository = repository
87 87 repo_to_perm.user_id = def_user.user_id
88 88
89 89 return repo_to_perm
90 90
91 91 @LazyProperty
92 92 def repos_path(self):
93 93 """
94 94 Gets the repositories root path from database
95 95 """
96 96 settings_model = VcsSettingsModel(sa=self.sa)
97 97 return settings_model.get_repos_location()
98 98
99 99 def get(self, repo_id, cache=False):
100 100 repo = self.sa.query(Repository) \
101 101 .filter(Repository.repo_id == repo_id)
102 102
103 103 if cache:
104 104 repo = repo.options(FromCache("sql_cache_short",
105 105 "get_repo_%s" % repo_id))
106 106 return repo.scalar()
107 107
108 108 def get_repo(self, repository):
109 109 return self._get_repo(repository)
110 110
111 111 def get_by_repo_name(self, repo_name, cache=False):
112 112 repo = self.sa.query(Repository) \
113 113 .filter(Repository.repo_name == repo_name)
114 114
115 115 if cache:
116 116 repo = repo.options(FromCache("sql_cache_short",
117 117 "get_repo_%s" % repo_name))
118 118 return repo.scalar()
119 119
120 120 def _extract_id_from_repo_name(self, repo_name):
121 121 if repo_name.startswith('/'):
122 122 repo_name = repo_name.lstrip('/')
123 123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
124 124 if by_id_match:
125 125 return by_id_match.groups()[0]
126 126
127 127 def get_repo_by_id(self, repo_name):
128 128 """
129 129 Extracts repo_name by id from special urls.
130 130 Example url is _11/repo_name
131 131
132 132 :param repo_name:
133 133 :return: repo object if matched else None
134 134 """
135 135 try:
136 136 _repo_id = self._extract_id_from_repo_name(repo_name)
137 137 if _repo_id:
138 138 return self.get(_repo_id)
139 139 except Exception:
140 140 log.exception('Failed to extract repo_name from URL')
141 141
142 142 return None
143 143
144 def get_url(self, repo):
145 return url('summary_home', repo_name=repo.repo_name, qualified=True)
146
144 147 def get_users(self, name_contains=None, limit=20, only_active=True):
145 148 # TODO: mikhail: move this method to the UserModel.
146 149 query = self.sa.query(User)
147 150 if only_active:
148 151 query = query.filter(User.active == true())
149 152
150 153 if name_contains:
151 154 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
152 155 query = query.filter(
153 156 or_(
154 157 User.name.ilike(ilike_expression),
155 158 User.lastname.ilike(ilike_expression),
156 159 User.username.ilike(ilike_expression)
157 160 )
158 161 )
159 162 query = query.limit(limit)
160 163 users = query.all()
161 164
162 165 _users = [
163 166 {
164 167 'id': user.user_id,
165 168 'first_name': user.name,
166 169 'last_name': user.lastname,
167 170 'username': user.username,
168 171 'icon_link': h.gravatar_url(user.email, 14),
169 172 'value_display': h.person(user.email),
170 173 'value': user.username,
171 174 'value_type': 'user',
172 175 'active': user.active,
173 176 }
174 177 for user in users
175 178 ]
176 179 return _users
177 180
178 181 def get_user_groups(self, name_contains=None, limit=20, only_active=True):
179 182 # TODO: mikhail: move this method to the UserGroupModel.
180 183 query = self.sa.query(UserGroup)
181 184 if only_active:
182 185 query = query.filter(UserGroup.users_group_active == true())
183 186
184 187 if name_contains:
185 188 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
186 189 query = query.filter(
187 190 UserGroup.users_group_name.ilike(ilike_expression))\
188 191 .order_by(func.length(UserGroup.users_group_name))\
189 192 .order_by(UserGroup.users_group_name)
190 193
191 194 query = query.limit(limit)
192 195 user_groups = query.all()
193 196 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
194 197 user_groups = UserGroupList(user_groups, perm_set=perm_set)
195 198
196 199 _groups = [
197 200 {
198 201 'id': group.users_group_id,
199 202 # TODO: marcink figure out a way to generate the url for the
200 203 # icon
201 204 'icon_link': '',
202 205 'value_display': 'Group: %s (%d members)' % (
203 206 group.users_group_name, len(group.members),),
204 207 'value': group.users_group_name,
205 208 'value_type': 'user_group',
206 209 'active': group.users_group_active,
207 210 }
208 211 for group in user_groups
209 212 ]
210 213 return _groups
211 214
212 215 @classmethod
213 216 def update_repoinfo(cls, repositories=None):
214 217 if not repositories:
215 218 repositories = Repository.getAll()
216 219 for repo in repositories:
217 220 repo.update_commit_cache()
218 221
219 222 def get_repos_as_dict(self, repo_list=None, admin=False,
220 223 super_user_actions=False):
221 224
222 225 from rhodecode.lib.utils import PartialRenderer
223 226 _render = PartialRenderer('data_table/_dt_elements.html')
224 227 c = _render.c
225 228
226 229 def quick_menu(repo_name):
227 230 return _render('quick_menu', repo_name)
228 231
229 232 def repo_lnk(name, rtype, rstate, private, fork_of):
230 233 return _render('repo_name', name, rtype, rstate, private, fork_of,
231 234 short_name=not admin, admin=False)
232 235
233 236 def last_change(last_change):
234 237 return _render("last_change", last_change)
235 238
236 239 def rss_lnk(repo_name):
237 240 return _render("rss", repo_name)
238 241
239 242 def atom_lnk(repo_name):
240 243 return _render("atom", repo_name)
241 244
242 245 def last_rev(repo_name, cs_cache):
243 246 return _render('revision', repo_name, cs_cache.get('revision'),
244 247 cs_cache.get('raw_id'), cs_cache.get('author'),
245 248 cs_cache.get('message'))
246 249
247 250 def desc(desc):
248 251 if c.visual.stylify_metatags:
249 252 return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
250 253 else:
251 254 return h.urlify_text(h.html_escape(h.truncate(desc, 60)))
252 255
253 256 def state(repo_state):
254 257 return _render("repo_state", repo_state)
255 258
256 259 def repo_actions(repo_name):
257 260 return _render('repo_actions', repo_name, super_user_actions)
258 261
259 262 def user_profile(username):
260 263 return _render('user_profile', username)
261 264
262 265 repos_data = []
263 266 for repo in repo_list:
264 267 cs_cache = repo.changeset_cache
265 268 row = {
266 269 "menu": quick_menu(repo.repo_name),
267 270
268 271 "name": repo_lnk(repo.repo_name, repo.repo_type,
269 272 repo.repo_state, repo.private, repo.fork),
270 273 "name_raw": repo.repo_name.lower(),
271 274
272 275 "last_change": last_change(repo.last_db_change),
273 276 "last_change_raw": datetime_to_time(repo.last_db_change),
274 277
275 278 "last_changeset": last_rev(repo.repo_name, cs_cache),
276 279 "last_changeset_raw": cs_cache.get('revision'),
277 280
278 281 "desc": desc(repo.description),
279 282 "owner": user_profile(repo.user.username),
280 283
281 284 "state": state(repo.repo_state),
282 285 "rss": rss_lnk(repo.repo_name),
283 286
284 287 "atom": atom_lnk(repo.repo_name),
285 288 }
286 289 if admin:
287 290 row.update({
288 291 "action": repo_actions(repo.repo_name),
289 292 })
290 293 repos_data.append(row)
291 294
292 295 return repos_data
293 296
294 297 def _get_defaults(self, repo_name):
295 298 """
296 299 Gets information about repository, and returns a dict for
297 300 usage in forms
298 301
299 302 :param repo_name:
300 303 """
301 304
302 305 repo_info = Repository.get_by_repo_name(repo_name)
303 306
304 307 if repo_info is None:
305 308 return None
306 309
307 310 defaults = repo_info.get_dict()
308 311 defaults['repo_name'] = repo_info.just_name
309 312
310 313 groups = repo_info.groups_with_parents
311 314 parent_group = groups[-1] if groups else None
312 315
313 316 # we use -1 as this is how in HTML, we mark an empty group
314 317 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
315 318
316 319 keys_to_process = (
317 320 {'k': 'repo_type', 'strip': False},
318 321 {'k': 'repo_enable_downloads', 'strip': True},
319 322 {'k': 'repo_description', 'strip': True},
320 323 {'k': 'repo_enable_locking', 'strip': True},
321 324 {'k': 'repo_landing_rev', 'strip': True},
322 325 {'k': 'clone_uri', 'strip': False},
323 326 {'k': 'repo_private', 'strip': True},
324 327 {'k': 'repo_enable_statistics', 'strip': True}
325 328 )
326 329
327 330 for item in keys_to_process:
328 331 attr = item['k']
329 332 if item['strip']:
330 333 attr = remove_prefix(item['k'], 'repo_')
331 334
332 335 val = defaults[attr]
333 336 if item['k'] == 'repo_landing_rev':
334 337 val = ':'.join(defaults[attr])
335 338 defaults[item['k']] = val
336 339 if item['k'] == 'clone_uri':
337 340 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
338 341
339 342 # fill owner
340 343 if repo_info.user:
341 344 defaults.update({'user': repo_info.user.username})
342 345 else:
343 346 replacement_user = User.get_first_super_admin().username
344 347 defaults.update({'user': replacement_user})
345 348
346 349 # fill repository users
347 350 for p in repo_info.repo_to_perm:
348 351 defaults.update({'u_perm_%s' % p.user.user_id:
349 352 p.permission.permission_name})
350 353
351 354 # fill repository groups
352 355 for p in repo_info.users_group_to_perm:
353 356 defaults.update({'g_perm_%s' % p.users_group.users_group_id:
354 357 p.permission.permission_name})
355 358
356 359 return defaults
357 360
358 361 def update(self, repo, **kwargs):
359 362 try:
360 363 cur_repo = self._get_repo(repo)
361 364 source_repo_name = cur_repo.repo_name
362 365 if 'user' in kwargs:
363 366 cur_repo.user = User.get_by_username(kwargs['user'])
364 367
365 368 if 'repo_group' in kwargs:
366 369 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
367 370 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
368 371
369 372 update_keys = [
370 373 (1, 'repo_enable_downloads'),
371 374 (1, 'repo_description'),
372 375 (1, 'repo_enable_locking'),
373 376 (1, 'repo_landing_rev'),
374 377 (1, 'repo_private'),
375 378 (1, 'repo_enable_statistics'),
376 379 (0, 'clone_uri'),
377 380 (0, 'fork_id')
378 381 ]
379 382 for strip, k in update_keys:
380 383 if k in kwargs:
381 384 val = kwargs[k]
382 385 if strip:
383 386 k = remove_prefix(k, 'repo_')
384 387 if k == 'clone_uri':
385 388 from rhodecode.model.validators import Missing
386 389 _change = kwargs.get('clone_uri_change')
387 390 if _change in [Missing, 'OLD']:
388 391 # we don't change the value, so use original one
389 392 val = cur_repo.clone_uri
390 393
391 394 setattr(cur_repo, k, val)
392 395
393 396 new_name = cur_repo.get_new_name(kwargs['repo_name'])
394 397 cur_repo.repo_name = new_name
395 398
396 399 # if private flag is set, reset default permission to NONE
397 400 if kwargs.get('repo_private'):
398 401 EMPTY_PERM = 'repository.none'
399 402 RepoModel().grant_user_permission(
400 403 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
401 404 )
402 405
403 406 # handle extra fields
404 407 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
405 408 kwargs):
406 409 k = RepositoryField.un_prefix_key(field)
407 410 ex_field = RepositoryField.get_by_key_name(
408 411 key=k, repo=cur_repo)
409 412 if ex_field:
410 413 ex_field.field_value = kwargs[field]
411 414 self.sa.add(ex_field)
412 415 self.sa.add(cur_repo)
413 416
414 417 if source_repo_name != new_name:
415 418 # rename repository
416 419 self._rename_filesystem_repo(
417 420 old=source_repo_name, new=new_name)
418 421
419 422 return cur_repo
420 423 except Exception:
421 424 log.error(traceback.format_exc())
422 425 raise
423 426
424 427 def _create_repo(self, repo_name, repo_type, description, owner,
425 428 private=False, clone_uri=None, repo_group=None,
426 429 landing_rev='rev:tip', fork_of=None,
427 430 copy_fork_permissions=False, enable_statistics=False,
428 431 enable_locking=False, enable_downloads=False,
429 432 copy_group_permissions=False,
430 433 state=Repository.STATE_PENDING):
431 434 """
432 435 Create repository inside database with PENDING state, this should be
433 436 only executed by create() repo. With exception of importing existing
434 437 repos
435 438 """
436 439 from rhodecode.model.scm import ScmModel
437 440
438 441 owner = self._get_user(owner)
439 442 fork_of = self._get_repo(fork_of)
440 443 repo_group = self._get_repo_group(safe_int(repo_group))
441 444
442 445 try:
443 446 repo_name = safe_unicode(repo_name)
444 447 description = safe_unicode(description)
445 448 # repo name is just a name of repository
446 449 # while repo_name_full is a full qualified name that is combined
447 450 # with name and path of group
448 451 repo_name_full = repo_name
449 452 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
450 453
451 454 new_repo = Repository()
452 455 new_repo.repo_state = state
453 456 new_repo.enable_statistics = False
454 457 new_repo.repo_name = repo_name_full
455 458 new_repo.repo_type = repo_type
456 459 new_repo.user = owner
457 460 new_repo.group = repo_group
458 461 new_repo.description = description or repo_name
459 462 new_repo.private = private
460 463 new_repo.clone_uri = clone_uri
461 464 new_repo.landing_rev = landing_rev
462 465
463 466 new_repo.enable_statistics = enable_statistics
464 467 new_repo.enable_locking = enable_locking
465 468 new_repo.enable_downloads = enable_downloads
466 469
467 470 if repo_group:
468 471 new_repo.enable_locking = repo_group.enable_locking
469 472
470 473 if fork_of:
471 474 parent_repo = fork_of
472 475 new_repo.fork = parent_repo
473 476
474 477 events.trigger(events.RepoPreCreateEvent(new_repo))
475 478
476 479 self.sa.add(new_repo)
477 480
478 481 EMPTY_PERM = 'repository.none'
479 482 if fork_of and copy_fork_permissions:
480 483 repo = fork_of
481 484 user_perms = UserRepoToPerm.query() \
482 485 .filter(UserRepoToPerm.repository == repo).all()
483 486 group_perms = UserGroupRepoToPerm.query() \
484 487 .filter(UserGroupRepoToPerm.repository == repo).all()
485 488
486 489 for perm in user_perms:
487 490 UserRepoToPerm.create(
488 491 perm.user, new_repo, perm.permission)
489 492
490 493 for perm in group_perms:
491 494 UserGroupRepoToPerm.create(
492 495 perm.users_group, new_repo, perm.permission)
493 496 # in case we copy permissions and also set this repo to private
494 497 # override the default user permission to make it a private
495 498 # repo
496 499 if private:
497 500 RepoModel(self.sa).grant_user_permission(
498 501 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
499 502
500 503 elif repo_group and copy_group_permissions:
501 504 user_perms = UserRepoGroupToPerm.query() \
502 505 .filter(UserRepoGroupToPerm.group == repo_group).all()
503 506
504 507 group_perms = UserGroupRepoGroupToPerm.query() \
505 508 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
506 509
507 510 for perm in user_perms:
508 511 perm_name = perm.permission.permission_name.replace(
509 512 'group.', 'repository.')
510 513 perm_obj = Permission.get_by_key(perm_name)
511 514 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
512 515
513 516 for perm in group_perms:
514 517 perm_name = perm.permission.permission_name.replace(
515 518 'group.', 'repository.')
516 519 perm_obj = Permission.get_by_key(perm_name)
517 520 UserGroupRepoToPerm.create(
518 521 perm.users_group, new_repo, perm_obj)
519 522
520 523 if private:
521 524 RepoModel(self.sa).grant_user_permission(
522 525 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
523 526
524 527 else:
525 528 perm_obj = self._create_default_perms(new_repo, private)
526 529 self.sa.add(perm_obj)
527 530
528 531 # now automatically start following this repository as owner
529 532 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
530 533 owner.user_id)
531 534
532 535 # we need to flush here, in order to check if database won't
533 536 # throw any exceptions, create filesystem dirs at the very end
534 537 self.sa.flush()
535 538 events.trigger(events.RepoCreatedEvent(new_repo))
536 539 return new_repo
537 540
538 541 except Exception:
539 542 log.error(traceback.format_exc())
540 543 raise
541 544
542 545 def create(self, form_data, cur_user):
543 546 """
544 547 Create repository using celery tasks
545 548
546 549 :param form_data:
547 550 :param cur_user:
548 551 """
549 552 from rhodecode.lib.celerylib import tasks, run_task
550 553 return run_task(tasks.create_repo, form_data, cur_user)
551 554
552 555 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
553 556 perm_deletions=None, check_perms=True,
554 557 cur_user=None):
555 558 if not perm_additions:
556 559 perm_additions = []
557 560 if not perm_updates:
558 561 perm_updates = []
559 562 if not perm_deletions:
560 563 perm_deletions = []
561 564
562 565 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
563 566
564 567 # update permissions
565 568 for member_id, perm, member_type in perm_updates:
566 569 member_id = int(member_id)
567 570 if member_type == 'user':
568 571 # this updates also current one if found
569 572 self.grant_user_permission(
570 573 repo=repo, user=member_id, perm=perm)
571 574 else: # set for user group
572 575 # check if we have permissions to alter this usergroup
573 576 member_name = UserGroup.get(member_id).users_group_name
574 577 if not check_perms or HasUserGroupPermissionAny(
575 578 *req_perms)(member_name, user=cur_user):
576 579 self.grant_user_group_permission(
577 580 repo=repo, group_name=member_id, perm=perm)
578 581
579 582 # set new permissions
580 583 for member_id, perm, member_type in perm_additions:
581 584 member_id = int(member_id)
582 585 if member_type == 'user':
583 586 self.grant_user_permission(
584 587 repo=repo, user=member_id, perm=perm)
585 588 else: # set for user group
586 589 # check if we have permissions to alter this usergroup
587 590 member_name = UserGroup.get(member_id).users_group_name
588 591 if not check_perms or HasUserGroupPermissionAny(
589 592 *req_perms)(member_name, user=cur_user):
590 593 self.grant_user_group_permission(
591 594 repo=repo, group_name=member_id, perm=perm)
592 595
593 596 # delete permissions
594 597 for member_id, perm, member_type in perm_deletions:
595 598 member_id = int(member_id)
596 599 if member_type == 'user':
597 600 self.revoke_user_permission(repo=repo, user=member_id)
598 601 else: # set for user group
599 602 # check if we have permissions to alter this usergroup
600 603 member_name = UserGroup.get(member_id).users_group_name
601 604 if not check_perms or HasUserGroupPermissionAny(
602 605 *req_perms)(member_name, user=cur_user):
603 606 self.revoke_user_group_permission(
604 607 repo=repo, group_name=member_id)
605 608
606 609 def create_fork(self, form_data, cur_user):
607 610 """
608 611 Simple wrapper into executing celery task for fork creation
609 612
610 613 :param form_data:
611 614 :param cur_user:
612 615 """
613 616 from rhodecode.lib.celerylib import tasks, run_task
614 617 return run_task(tasks.create_repo_fork, form_data, cur_user)
615 618
616 619 def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
617 620 """
618 621 Delete given repository, forks parameter defines what do do with
619 622 attached forks. Throws AttachedForksError if deleted repo has attached
620 623 forks
621 624
622 625 :param repo:
623 626 :param forks: str 'delete' or 'detach'
624 627 :param fs_remove: remove(archive) repo from filesystem
625 628 """
626 629 if not cur_user:
627 630 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
628 631 repo = self._get_repo(repo)
629 632 if repo:
630 633 if forks == 'detach':
631 634 for r in repo.forks:
632 635 r.fork = None
633 636 self.sa.add(r)
634 637 elif forks == 'delete':
635 638 for r in repo.forks:
636 639 self.delete(r, forks='delete')
637 640 elif [f for f in repo.forks]:
638 641 raise AttachedForksError()
639 642
640 643 old_repo_dict = repo.get_dict()
641 644 events.trigger(events.RepoPreDeleteEvent(repo))
642 645 try:
643 646 self.sa.delete(repo)
644 647 if fs_remove:
645 648 self._delete_filesystem_repo(repo)
646 649 else:
647 650 log.debug('skipping removal from filesystem')
648 651 old_repo_dict.update({
649 652 'deleted_by': cur_user,
650 653 'deleted_on': time.time(),
651 654 })
652 655 log_delete_repository(**old_repo_dict)
653 656 events.trigger(events.RepoDeletedEvent(repo))
654 657 except Exception:
655 658 log.error(traceback.format_exc())
656 659 raise
657 660
658 661 def grant_user_permission(self, repo, user, perm):
659 662 """
660 663 Grant permission for user on given repository, or update existing one
661 664 if found
662 665
663 666 :param repo: Instance of Repository, repository_id, or repository name
664 667 :param user: Instance of User, user_id or username
665 668 :param perm: Instance of Permission, or permission_name
666 669 """
667 670 user = self._get_user(user)
668 671 repo = self._get_repo(repo)
669 672 permission = self._get_perm(perm)
670 673
671 674 # check if we have that permission already
672 675 obj = self.sa.query(UserRepoToPerm) \
673 676 .filter(UserRepoToPerm.user == user) \
674 677 .filter(UserRepoToPerm.repository == repo) \
675 678 .scalar()
676 679 if obj is None:
677 680 # create new !
678 681 obj = UserRepoToPerm()
679 682 obj.repository = repo
680 683 obj.user = user
681 684 obj.permission = permission
682 685 self.sa.add(obj)
683 686 log.debug('Granted perm %s to %s on %s', perm, user, repo)
684 687 action_logger_generic(
685 688 'granted permission: {} to user: {} on repo: {}'.format(
686 689 perm, user, repo), namespace='security.repo')
687 690 return obj
688 691
689 692 def revoke_user_permission(self, repo, user):
690 693 """
691 694 Revoke permission for user on given repository
692 695
693 696 :param repo: Instance of Repository, repository_id, or repository name
694 697 :param user: Instance of User, user_id or username
695 698 """
696 699
697 700 user = self._get_user(user)
698 701 repo = self._get_repo(repo)
699 702
700 703 obj = self.sa.query(UserRepoToPerm) \
701 704 .filter(UserRepoToPerm.repository == repo) \
702 705 .filter(UserRepoToPerm.user == user) \
703 706 .scalar()
704 707 if obj:
705 708 self.sa.delete(obj)
706 709 log.debug('Revoked perm on %s on %s', repo, user)
707 710 action_logger_generic(
708 711 'revoked permission from user: {} on repo: {}'.format(
709 712 user, repo), namespace='security.repo')
710 713
711 714 def grant_user_group_permission(self, repo, group_name, perm):
712 715 """
713 716 Grant permission for user group on given repository, or update
714 717 existing one if found
715 718
716 719 :param repo: Instance of Repository, repository_id, or repository name
717 720 :param group_name: Instance of UserGroup, users_group_id,
718 721 or user group name
719 722 :param perm: Instance of Permission, or permission_name
720 723 """
721 724 repo = self._get_repo(repo)
722 725 group_name = self._get_user_group(group_name)
723 726 permission = self._get_perm(perm)
724 727
725 728 # check if we have that permission already
726 729 obj = self.sa.query(UserGroupRepoToPerm) \
727 730 .filter(UserGroupRepoToPerm.users_group == group_name) \
728 731 .filter(UserGroupRepoToPerm.repository == repo) \
729 732 .scalar()
730 733
731 734 if obj is None:
732 735 # create new
733 736 obj = UserGroupRepoToPerm()
734 737
735 738 obj.repository = repo
736 739 obj.users_group = group_name
737 740 obj.permission = permission
738 741 self.sa.add(obj)
739 742 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
740 743 action_logger_generic(
741 744 'granted permission: {} to usergroup: {} on repo: {}'.format(
742 745 perm, group_name, repo), namespace='security.repo')
743 746
744 747 return obj
745 748
746 749 def revoke_user_group_permission(self, repo, group_name):
747 750 """
748 751 Revoke permission for user group on given repository
749 752
750 753 :param repo: Instance of Repository, repository_id, or repository name
751 754 :param group_name: Instance of UserGroup, users_group_id,
752 755 or user group name
753 756 """
754 757 repo = self._get_repo(repo)
755 758 group_name = self._get_user_group(group_name)
756 759
757 760 obj = self.sa.query(UserGroupRepoToPerm) \
758 761 .filter(UserGroupRepoToPerm.repository == repo) \
759 762 .filter(UserGroupRepoToPerm.users_group == group_name) \
760 763 .scalar()
761 764 if obj:
762 765 self.sa.delete(obj)
763 766 log.debug('Revoked perm to %s on %s', repo, group_name)
764 767 action_logger_generic(
765 768 'revoked permission from usergroup: {} on repo: {}'.format(
766 769 group_name, repo), namespace='security.repo')
767 770
768 771 def delete_stats(self, repo_name):
769 772 """
770 773 removes stats for given repo
771 774
772 775 :param repo_name:
773 776 """
774 777 repo = self._get_repo(repo_name)
775 778 try:
776 779 obj = self.sa.query(Statistics) \
777 780 .filter(Statistics.repository == repo).scalar()
778 781 if obj:
779 782 self.sa.delete(obj)
780 783 except Exception:
781 784 log.error(traceback.format_exc())
782 785 raise
783 786
784 787 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
785 788 field_type='str', field_desc=''):
786 789
787 790 repo = self._get_repo(repo_name)
788 791
789 792 new_field = RepositoryField()
790 793 new_field.repository = repo
791 794 new_field.field_key = field_key
792 795 new_field.field_type = field_type # python type
793 796 new_field.field_value = field_value
794 797 new_field.field_desc = field_desc
795 798 new_field.field_label = field_label
796 799 self.sa.add(new_field)
797 800 return new_field
798 801
799 802 def delete_repo_field(self, repo_name, field_key):
800 803 repo = self._get_repo(repo_name)
801 804 field = RepositoryField.get_by_key_name(field_key, repo)
802 805 if field:
803 806 self.sa.delete(field)
804 807
805 808 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
806 809 clone_uri=None, repo_store_location=None,
807 810 use_global_config=False):
808 811 """
809 812 makes repository on filesystem. It's group aware means it'll create
810 813 a repository within a group, and alter the paths accordingly of
811 814 group location
812 815
813 816 :param repo_name:
814 817 :param alias:
815 818 :param parent:
816 819 :param clone_uri:
817 820 :param repo_store_location:
818 821 """
819 822 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
820 823 from rhodecode.model.scm import ScmModel
821 824
822 825 if Repository.NAME_SEP in repo_name:
823 826 raise ValueError(
824 827 'repo_name must not contain groups got `%s`' % repo_name)
825 828
826 829 if isinstance(repo_group, RepoGroup):
827 830 new_parent_path = os.sep.join(repo_group.full_path_splitted)
828 831 else:
829 832 new_parent_path = repo_group or ''
830 833
831 834 if repo_store_location:
832 835 _paths = [repo_store_location]
833 836 else:
834 837 _paths = [self.repos_path, new_parent_path, repo_name]
835 838 # we need to make it str for mercurial
836 839 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
837 840
838 841 # check if this path is not a repository
839 842 if is_valid_repo(repo_path, self.repos_path):
840 843 raise Exception('This path %s is a valid repository' % repo_path)
841 844
842 845 # check if this path is a group
843 846 if is_valid_repo_group(repo_path, self.repos_path):
844 847 raise Exception('This path %s is a valid group' % repo_path)
845 848
846 849 log.info('creating repo %s in %s from url: `%s`',
847 850 repo_name, safe_unicode(repo_path),
848 851 obfuscate_url_pw(clone_uri))
849 852
850 853 backend = get_backend(repo_type)
851 854
852 855 config_repo = None if use_global_config else repo_name
853 856 if config_repo and new_parent_path:
854 857 config_repo = Repository.NAME_SEP.join(
855 858 (new_parent_path, config_repo))
856 859 config = make_db_config(clear_session=False, repo=config_repo)
857 860 config.set('extensions', 'largefiles', '')
858 861
859 862 # patch and reset hooks section of UI config to not run any
860 863 # hooks on creating remote repo
861 864 config.clear_section('hooks')
862 865
863 866 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
864 867 if repo_type == 'git':
865 868 repo = backend(
866 869 repo_path, config=config, create=True, src_url=clone_uri,
867 870 bare=True)
868 871 else:
869 872 repo = backend(
870 873 repo_path, config=config, create=True, src_url=clone_uri)
871 874
872 875 ScmModel().install_hooks(repo, repo_type=repo_type)
873 876
874 877 log.debug('Created repo %s with %s backend',
875 878 safe_unicode(repo_name), safe_unicode(repo_type))
876 879 return repo
877 880
878 881 def _rename_filesystem_repo(self, old, new):
879 882 """
880 883 renames repository on filesystem
881 884
882 885 :param old: old name
883 886 :param new: new name
884 887 """
885 888 log.info('renaming repo from %s to %s', old, new)
886 889
887 890 old_path = os.path.join(self.repos_path, old)
888 891 new_path = os.path.join(self.repos_path, new)
889 892 if os.path.isdir(new_path):
890 893 raise Exception(
891 894 'Was trying to rename to already existing dir %s' % new_path
892 895 )
893 896 shutil.move(old_path, new_path)
894 897
    def _delete_filesystem_repo(self, repo):
        """
        Removes repo from filesystem. The removal is actually made by
        adding a ``rm__`` prefix to the directory and renaming the internal
        .hg/.git dir, so the directory is no longer a valid repository for
        rhodecode; it can be undeleted later by reverting these renames.

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("Removing repository %s", rm_path)
        # disable hg/git internal that it doesn't get detected as repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo by renaming its control dir (.hg/.git)
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        # build a timestamped removal name: rm__YYYYmmdd_HHMMSS_<usec>__<name>
        _now = datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo_group:
            # if repository is in group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))
@@ -1,249 +1,250 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Import early to make sure things are patched up properly
4 4 from setuptools import setup, find_packages
5 5
6 6 import os
7 7 import sys
8 8 import platform
9 9
# fail fast on unsupported interpreters; this setup script targets Python 2.7
if sys.version_info < (2, 7):
    raise Exception('RhodeCode requires Python 2.7 or later')


# absolute directory of this setup.py, used to locate package metadata files
here = os.path.abspath(os.path.dirname(__file__))
15 15
16 16
17 17 def _get_meta_var(name, data, callback_handler=None):
18 18 import re
19 19 matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data)
20 20 if matches:
21 21 if not callable(callback_handler):
22 22 callback_handler = lambda v: v
23 23
24 24 return callback_handler(eval(matches.groups()[0]))
25 25
# read our package's __init__.py and pull metadata variables out of it
_meta = open(os.path.join(here, 'rhodecode', '__init__.py'), 'rb')
_metadata = _meta.read()
_meta.close()

# NOTE(review): `callback` is defined but not used below — __version__ is
# read straight from the VERSION file; candidate for removal
callback = lambda V: ('.'.join(map(str, V[:3])) + '.'.join(V[3:]))
__version__ = open(os.path.join('rhodecode', 'VERSION')).read().strip()
__license__ = _get_meta_var('__license__', _metadata)
__author__ = _get_meta_var('__author__', _metadata)
__url__ = _get_meta_var('__url__', _metadata)
# defines current platform
__platform__ = platform.system()

# Cygwin has different platform identifiers, but they all contain the
# term "CYGWIN"
is_windows = __platform__ == 'Windows' or 'CYGWIN' in __platform__
# runtime dependencies installed via install_requires
requirements = [
    'Babel',
    'Beaker',
    'FormEncode',
    'Mako',
    'Markdown',
    'MarkupSafe',
    'MySQL-python',
    'Paste',
    'PasteDeploy',
    'PasteScript',
    'Pygments',
    'Pylons',
    'Pyro4',
    'Routes',
    'SQLAlchemy',
    'Tempita',
    'URLObject',
    'WebError',
    'WebHelpers',
    'WebHelpers2',
    'WebOb',
    'WebTest',
    'Whoosh',
    'alembic',
    'amqplib',
    'anyjson',
    'appenlight-client',
    'authomatic',
    'backport_ipaddress',
    'celery',
    'colander',
    'decorator',
    'docutils',
    'gunicorn',
    'infrae.cache',
    'ipython',
    'iso8601',
    'kombu',
    'marshmallow',
    'msgpack-python',
    'packaging',
    'psycopg2',
    'py-gfm',
    'pycrypto',
    'pycurl',
    'pyparsing',
    'pyramid',
    'pyramid-debugtoolbar',
    'pyramid-mako',
    'pyramid-beaker',
    'pysqlite',
    'python-dateutil',
    'python-ldap',
    'python-memcached',
    'python-pam',
    'recaptcha-client',
    'repoze.lru',
    'requests',
    'simplejson',
    'waitress',
    'zope.cachedescriptors',
    'dogpile.cache',
    'dogpile.core'
]

# platform specific requirements: psutil and py-bcrypt are only added on
# non-Windows platforms
if is_windows:
    pass
else:
    requirements.append('psutil')
    requirements.append('py-bcrypt')
112 113
# dependencies needed only for running the test suite
test_requirements = [
    'WebTest',
    'configobj',
    'cssselect',
    'flake8',
    'lxml',
    'mock',
    'pytest',
    'pytest-cov',
    'pytest-runner',
]

# dependencies needed at setup time
setup_requirements = [
    'PasteScript',
    'pytest-runner',
]

dependency_links = [
]

classifiers = [
    'Development Status :: 6 - Mature',
    'Environment :: Web Environment',
    'Framework :: Pylons',
    'Intended Audience :: Developers',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2.7',
]


# additional files from project that goes somewhere in the filesystem
# relative to sys.prefix
data_files = []

# additional files that goes into package itself
package_data = {'rhodecode': ['i18n/*/LC_MESSAGES/*.mo', ], }

description = ('RhodeCode is a fast and powerful management tool '
               'for Mercurial and GIT with a built in push/pull server, '
               'full text search and code-review.')

keywords = ' '.join([
    'rhodecode', 'rhodiumcode', 'mercurial', 'git', 'code review',
    'repo groups', 'ldap', 'repository management', 'hgweb replacement',
    'hgwebdir', 'gitweb replacement', 'serving hgweb',
])

# long description: README plus changelog; falls back to the short
# description when the files are missing (NOTE: Python 2 except syntax)
README_FILE = 'README.rst'
CHANGELOG_FILE = 'CHANGES.rst'
try:
    long_description = open(README_FILE).read() + '\n\n' + \
        open(CHANGELOG_FILE).read()

except IOError, err:
    sys.stderr.write(
        '[WARNING] Cannot find file specified as long_description (%s)\n or '
        'changelog (%s) skipping that file' % (README_FILE, CHANGELOG_FILE)
    )
    long_description = description

# packages
packages = find_packages()

# paster command entry points exposed under paste.global_paster_command
paster_commands = [
    'make-config=rhodecode.lib.paster_commands.make_config:Command',
    'setup-rhodecode=rhodecode.lib.paster_commands.setup_rhodecode:Command',
    'update-repoinfo=rhodecode.lib.paster_commands.update_repoinfo:Command',
    'cache-keys=rhodecode.lib.paster_commands.cache_keys:Command',
    'ishell=rhodecode.lib.paster_commands.ishell:Command',
    'upgrade-db=rhodecode.lib.dbmigrate:UpgradeDb',
    'celeryd=rhodecode.lib.celerypylons.commands:CeleryDaemonCommand',
]
187 188
# main setuptools entry point — wires together all the metadata, dependency
# lists and entry points assembled above
setup(
    name='rhodecode-enterprise-ce',
    version=__version__,
    description=description,
    long_description=long_description,
    keywords=keywords,
    license=__license__,
    author=__author__,
    author_email='marcin@rhodecode.com',
    dependency_links=dependency_links,
    url=__url__,
    install_requires=requirements,
    tests_require=test_requirements,
    classifiers=classifiers,
    setup_requires=setup_requirements,
    data_files=data_files,
    packages=packages,
    include_package_data=True,
    package_data=package_data,
    # translation extraction configuration for Babel
    message_extractors={
        'rhodecode': [
            ('**.py', 'python', None),
            ('**.js', 'javascript', None),
            ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
            ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}),
            ('public/**', 'ignore', None),
        ]
    },
    zip_safe=False,
    paster_plugins=['PasteScript', 'Pylons'],
    entry_points={
        # authentication plugin factories
        'enterprise.plugins1': [
            'crowd=rhodecode.authentication.plugins.auth_crowd:plugin_factory',
            'headers=rhodecode.authentication.plugins.auth_headers:plugin_factory',
            'jasig_cas=rhodecode.authentication.plugins.auth_jasig_cas:plugin_factory',
            'ldap=rhodecode.authentication.plugins.auth_ldap:plugin_factory',
            'pam=rhodecode.authentication.plugins.auth_pam:plugin_factory',
            'rhodecode=rhodecode.authentication.plugins.auth_rhodecode:plugin_factory',
            'token=rhodecode.authentication.plugins.auth_token:plugin_factory',
        ],
        'paste.app_factory': [
            'main=rhodecode.config.middleware:make_pyramid_app',
            'pylons=rhodecode.config.middleware:make_app',
        ],
        'paste.app_install': [
            'main=pylons.util:PylonsInstaller',
            'pylons=pylons.util:PylonsInstaller',
        ],
        'paste.global_paster_command': paster_commands,
        'pytest11': [
            'pylons=rhodecode.tests.pylons_plugin',
            'enterprise=rhodecode.tests.plugin',
        ],
        'console_scripts': [
            'rcserver=rhodecode.rcserver:main',
        ],
        'beaker.backends': [
            'memorylru_base=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerBase',
            'memorylru_debug=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerDebug'
        ]
    },
)
General Comments 0
You need to be logged in to leave comments. Login now