dependencies: bumped pyramid-debugtoolbar to 4.3.1
marcink
r1907:d622a0b9 default

The requested changes are too big and the content was truncated; only part of the diff is shown below.

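Every entry in the generated package set below follows the same pip2nix pattern, so a dependency bump is just a regenerated attribute: new name, new source URL, new checksum, and (if upstream requirements changed) an updated propagatedBuildInputs list. As a rough, hypothetical sketch of what the bumped pyramid-debugtoolbar entry looks like: the <hash-path> URL segment, the md5 value, and the empty license list are placeholders rather than values taken from PyPI, and the dependency list is the one visible in the diff further down.

  pyramid-debugtoolbar = super.buildPythonPackage {
    # hypothetical sketch; regenerate with pip2nix to get the real values
    name = "pyramid-debugtoolbar-4.3.1";
    buildInputs = with self; [];
    doCheck = false;
    # dependency list as it appears for the updated entry in the diff below
    propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments ipaddress];
    src = fetchurl {
      # placeholder URL and checksum; pip2nix records the real PyPI values here
      url = "https://pypi.python.org/packages/<hash-path>/pyramid_debugtoolbar-4.3.1.tar.gz";
      md5 = "<md5-of-the-4.3.1-sdist>";
    };
    meta = {
      # license metadata omitted in this sketch
      license = [ ];
    };
  };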
@@ -1,2086 +1,2086 b''
1 1 # Generated by pip2nix 0.4.0
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 {
5 5 Babel = super.buildPythonPackage {
6 6 name = "Babel-1.3";
7 7 buildInputs = with self; [];
8 8 doCheck = false;
9 9 propagatedBuildInputs = with self; [pytz];
10 10 src = fetchurl {
11 11 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
12 12 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 16 };
17 17 };
18 18 Beaker = super.buildPythonPackage {
19 19 name = "Beaker-1.9.0";
20 20 buildInputs = with self; [];
21 21 doCheck = false;
22 22 propagatedBuildInputs = with self; [funcsigs];
23 23 src = fetchurl {
24 24 url = "https://pypi.python.org/packages/93/b2/12de6937b06e9615dbb3cb3a1c9af17f133f435bdef59f4ad42032b6eb49/Beaker-1.9.0.tar.gz";
25 25 md5 = "38b3fcdfa24faf97c6cf66991eb54e9c";
26 26 };
27 27 meta = {
28 28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 29 };
30 30 };
31 31 CProfileV = super.buildPythonPackage {
32 32 name = "CProfileV-1.0.7";
33 33 buildInputs = with self; [];
34 34 doCheck = false;
35 35 propagatedBuildInputs = with self; [bottle];
36 36 src = fetchurl {
37 37 url = "https://pypi.python.org/packages/df/50/d8c1ada7d537c64b0f76453fa31dedb6af6e27b82fcf0331e5f71a4cf98b/CProfileV-1.0.7.tar.gz";
38 38 md5 = "db4c7640438aa3d8887e194c81c7a019";
39 39 };
40 40 meta = {
41 41 license = [ pkgs.lib.licenses.mit ];
42 42 };
43 43 };
44 44 Chameleon = super.buildPythonPackage {
45 45 name = "Chameleon-2.24";
46 46 buildInputs = with self; [];
47 47 doCheck = false;
48 48 propagatedBuildInputs = with self; [];
49 49 src = fetchurl {
50 50 url = "https://pypi.python.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
51 51 md5 = "1b01f1f6533a8a11d0d2f2366dec5342";
52 52 };
53 53 meta = {
54 54 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
55 55 };
56 56 };
57 57 FormEncode = super.buildPythonPackage {
58 58 name = "FormEncode-1.2.4";
59 59 buildInputs = with self; [];
60 60 doCheck = false;
61 61 propagatedBuildInputs = with self; [];
62 62 src = fetchurl {
63 63 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
64 64 md5 = "6bc17fb9aed8aea198975e888e2077f4";
65 65 };
66 66 meta = {
67 67 license = [ pkgs.lib.licenses.psfl ];
68 68 };
69 69 };
70 70 Jinja2 = super.buildPythonPackage {
71 71 name = "Jinja2-2.7.3";
72 72 buildInputs = with self; [];
73 73 doCheck = false;
74 74 propagatedBuildInputs = with self; [MarkupSafe];
75 75 src = fetchurl {
76 76 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
77 77 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
78 78 };
79 79 meta = {
80 80 license = [ pkgs.lib.licenses.bsdOriginal ];
81 81 };
82 82 };
83 83 Mako = super.buildPythonPackage {
84 84 name = "Mako-1.0.6";
85 85 buildInputs = with self; [];
86 86 doCheck = false;
87 87 propagatedBuildInputs = with self; [MarkupSafe];
88 88 src = fetchurl {
89 89 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
90 90 md5 = "a28e22a339080316b2acc352b9ee631c";
91 91 };
92 92 meta = {
93 93 license = [ pkgs.lib.licenses.mit ];
94 94 };
95 95 };
96 96 Markdown = super.buildPythonPackage {
97 97 name = "Markdown-2.6.8";
98 98 buildInputs = with self; [];
99 99 doCheck = false;
100 100 propagatedBuildInputs = with self; [];
101 101 src = fetchurl {
102 102 url = "https://pypi.python.org/packages/1d/25/3f6d2cb31ec42ca5bd3bfbea99b63892b735d76e26f20dd2dcc34ffe4f0d/Markdown-2.6.8.tar.gz";
103 103 md5 = "d9ef057a5bd185f6f536400a31fc5d45";
104 104 };
105 105 meta = {
106 106 license = [ pkgs.lib.licenses.bsdOriginal ];
107 107 };
108 108 };
109 109 MarkupSafe = super.buildPythonPackage {
110 110 name = "MarkupSafe-0.23";
111 111 buildInputs = with self; [];
112 112 doCheck = false;
113 113 propagatedBuildInputs = with self; [];
114 114 src = fetchurl {
115 115 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
116 116 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
117 117 };
118 118 meta = {
119 119 license = [ pkgs.lib.licenses.bsdOriginal ];
120 120 };
121 121 };
122 122 MySQL-python = super.buildPythonPackage {
123 123 name = "MySQL-python-1.2.5";
124 124 buildInputs = with self; [];
125 125 doCheck = false;
126 126 propagatedBuildInputs = with self; [];
127 127 src = fetchurl {
128 128 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
129 129 md5 = "654f75b302db6ed8dc5a898c625e030c";
130 130 };
131 131 meta = {
132 132 license = [ pkgs.lib.licenses.gpl1 ];
133 133 };
134 134 };
135 135 Paste = super.buildPythonPackage {
136 136 name = "Paste-2.0.3";
137 137 buildInputs = with self; [];
138 138 doCheck = false;
139 139 propagatedBuildInputs = with self; [six];
140 140 src = fetchurl {
141 141 url = "https://pypi.python.org/packages/30/c3/5c2f7c7a02e4f58d4454353fa1c32c94f79fa4e36d07a67c0ac295ea369e/Paste-2.0.3.tar.gz";
142 142 md5 = "1231e14eae62fa7ed76e9130b04bc61e";
143 143 };
144 144 meta = {
145 145 license = [ pkgs.lib.licenses.mit ];
146 146 };
147 147 };
148 148 PasteDeploy = super.buildPythonPackage {
149 149 name = "PasteDeploy-1.5.2";
150 150 buildInputs = with self; [];
151 151 doCheck = false;
152 152 propagatedBuildInputs = with self; [];
153 153 src = fetchurl {
154 154 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
155 155 md5 = "352b7205c78c8de4987578d19431af3b";
156 156 };
157 157 meta = {
158 158 license = [ pkgs.lib.licenses.mit ];
159 159 };
160 160 };
161 161 PasteScript = super.buildPythonPackage {
162 162 name = "PasteScript-1.7.5";
163 163 buildInputs = with self; [];
164 164 doCheck = false;
165 165 propagatedBuildInputs = with self; [Paste PasteDeploy];
166 166 src = fetchurl {
167 167 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
168 168 md5 = "4c72d78dcb6bb993f30536842c16af4d";
169 169 };
170 170 meta = {
171 171 license = [ pkgs.lib.licenses.mit ];
172 172 };
173 173 };
174 174 Pygments = super.buildPythonPackage {
175 175 name = "Pygments-2.2.0";
176 176 buildInputs = with self; [];
177 177 doCheck = false;
178 178 propagatedBuildInputs = with self; [];
179 179 src = fetchurl {
180 180 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
181 181 md5 = "13037baca42f16917cbd5ad2fab50844";
182 182 };
183 183 meta = {
184 184 license = [ pkgs.lib.licenses.bsdOriginal ];
185 185 };
186 186 };
187 187 Pylons = super.buildPythonPackage {
188 188 name = "Pylons-1.0.2.dev20170630";
189 189 buildInputs = with self; [];
190 190 doCheck = false;
191 191 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
192 192 src = fetchurl {
193 193 url = "https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f";
194 194 md5 = "f26633726fa2cd3a340316ee6a5d218f";
195 195 };
196 196 meta = {
197 197 license = [ pkgs.lib.licenses.bsdOriginal ];
198 198 };
199 199 };
200 200 Routes = super.buildPythonPackage {
201 201 name = "Routes-1.13";
202 202 buildInputs = with self; [];
203 203 doCheck = false;
204 204 propagatedBuildInputs = with self; [repoze.lru];
205 205 src = fetchurl {
206 206 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
207 207 md5 = "d527b0ab7dd9172b1275a41f97448783";
208 208 };
209 209 meta = {
210 210 license = [ pkgs.lib.licenses.bsdOriginal ];
211 211 };
212 212 };
213 213 SQLAlchemy = super.buildPythonPackage {
214 214 name = "SQLAlchemy-0.9.9";
215 215 buildInputs = with self; [];
216 216 doCheck = false;
217 217 propagatedBuildInputs = with self; [];
218 218 src = fetchurl {
219 219 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
220 220 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
221 221 };
222 222 meta = {
223 223 license = [ pkgs.lib.licenses.mit ];
224 224 };
225 225 };
226 226 Sphinx = super.buildPythonPackage {
227 227 name = "Sphinx-1.2.2";
228 228 buildInputs = with self; [];
229 229 doCheck = false;
230 230 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
231 231 src = fetchurl {
232 232 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
233 233 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
234 234 };
235 235 meta = {
236 236 license = [ pkgs.lib.licenses.bsdOriginal ];
237 237 };
238 238 };
239 239 Tempita = super.buildPythonPackage {
240 240 name = "Tempita-0.5.2";
241 241 buildInputs = with self; [];
242 242 doCheck = false;
243 243 propagatedBuildInputs = with self; [];
244 244 src = fetchurl {
245 245 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
246 246 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
247 247 };
248 248 meta = {
249 249 license = [ pkgs.lib.licenses.mit ];
250 250 };
251 251 };
252 252 URLObject = super.buildPythonPackage {
253 253 name = "URLObject-2.4.0";
254 254 buildInputs = with self; [];
255 255 doCheck = false;
256 256 propagatedBuildInputs = with self; [];
257 257 src = fetchurl {
258 258 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
259 259 md5 = "2ed819738a9f0a3051f31dc9924e3065";
260 260 };
261 261 meta = {
262 262 license = [ ];
263 263 };
264 264 };
265 265 WebError = super.buildPythonPackage {
266 266 name = "WebError-0.10.3";
267 267 buildInputs = with self; [];
268 268 doCheck = false;
269 269 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
270 270 src = fetchurl {
271 271 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
272 272 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
273 273 };
274 274 meta = {
275 275 license = [ pkgs.lib.licenses.mit ];
276 276 };
277 277 };
278 278 WebHelpers = super.buildPythonPackage {
279 279 name = "WebHelpers-1.3";
280 280 buildInputs = with self; [];
281 281 doCheck = false;
282 282 propagatedBuildInputs = with self; [MarkupSafe];
283 283 src = fetchurl {
284 284 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
285 285 md5 = "32749ffadfc40fea51075a7def32588b";
286 286 };
287 287 meta = {
288 288 license = [ pkgs.lib.licenses.bsdOriginal ];
289 289 };
290 290 };
291 291 WebHelpers2 = super.buildPythonPackage {
292 292 name = "WebHelpers2-2.0";
293 293 buildInputs = with self; [];
294 294 doCheck = false;
295 295 propagatedBuildInputs = with self; [MarkupSafe six];
296 296 src = fetchurl {
297 297 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
298 298 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
299 299 };
300 300 meta = {
301 301 license = [ pkgs.lib.licenses.mit ];
302 302 };
303 303 };
304 304 WebOb = super.buildPythonPackage {
305 305 name = "WebOb-1.7.3";
306 306 buildInputs = with self; [];
307 307 doCheck = false;
308 308 propagatedBuildInputs = with self; [];
309 309 src = fetchurl {
310 310 url = "https://pypi.python.org/packages/46/87/2f96d8d43b2078fae6e1d33fa86b95c228cebed060f4e3c7576cc44ea83b/WebOb-1.7.3.tar.gz";
311 311 md5 = "350028baffc508e3d23c078118e35316";
312 312 };
313 313 meta = {
314 314 license = [ pkgs.lib.licenses.mit ];
315 315 };
316 316 };
317 317 WebTest = super.buildPythonPackage {
318 318 name = "WebTest-2.0.27";
319 319 buildInputs = with self; [];
320 320 doCheck = false;
321 321 propagatedBuildInputs = with self; [six WebOb waitress beautifulsoup4];
322 322 src = fetchurl {
323 323 url = "https://pypi.python.org/packages/80/fa/ca3a759985c72e3a124cbca3e1f8a2e931a07ffd31fd45d8f7bf21cb95cf/WebTest-2.0.27.tar.gz";
324 324 md5 = "54e6515ac71c51b6fc90179483c749ad";
325 325 };
326 326 meta = {
327 327 license = [ pkgs.lib.licenses.mit ];
328 328 };
329 329 };
330 330 Whoosh = super.buildPythonPackage {
331 331 name = "Whoosh-2.7.4";
332 332 buildInputs = with self; [];
333 333 doCheck = false;
334 334 propagatedBuildInputs = with self; [];
335 335 src = fetchurl {
336 336 url = "https://pypi.python.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
337 337 md5 = "c2710105f20b3e29936bd2357383c325";
338 338 };
339 339 meta = {
340 340 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
341 341 };
342 342 };
343 343 alembic = super.buildPythonPackage {
344 344 name = "alembic-0.9.2";
345 345 buildInputs = with self; [];
346 346 doCheck = false;
347 347 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor python-dateutil];
348 348 src = fetchurl {
349 349 url = "https://pypi.python.org/packages/78/48/b5b26e7218b415f40b60b92c53853d242e5456c0f19f6c66101d98ff5f2a/alembic-0.9.2.tar.gz";
350 350 md5 = "40daf8bae50969beea40efaaf0839ff4";
351 351 };
352 352 meta = {
353 353 license = [ pkgs.lib.licenses.mit ];
354 354 };
355 355 };
356 356 amqplib = super.buildPythonPackage {
357 357 name = "amqplib-1.0.2";
358 358 buildInputs = with self; [];
359 359 doCheck = false;
360 360 propagatedBuildInputs = with self; [];
361 361 src = fetchurl {
362 362 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
363 363 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
364 364 };
365 365 meta = {
366 366 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
367 367 };
368 368 };
369 369 anyjson = super.buildPythonPackage {
370 370 name = "anyjson-0.3.3";
371 371 buildInputs = with self; [];
372 372 doCheck = false;
373 373 propagatedBuildInputs = with self; [];
374 374 src = fetchurl {
375 375 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
376 376 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
377 377 };
378 378 meta = {
379 379 license = [ pkgs.lib.licenses.bsdOriginal ];
380 380 };
381 381 };
382 382 appenlight-client = super.buildPythonPackage {
383 383 name = "appenlight-client-0.6.21";
384 384 buildInputs = with self; [];
385 385 doCheck = false;
386 386 propagatedBuildInputs = with self; [WebOb requests six];
387 387 src = fetchurl {
388 388 url = "https://pypi.python.org/packages/c9/23/91b66cfa0b963662c10b2a06ccaadf3f3a4848a7a2aa16255cb43d5160ec/appenlight_client-0.6.21.tar.gz";
389 389 md5 = "273999ac854fdaefa8d0fb61965a4ed9";
390 390 };
391 391 meta = {
392 392 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
393 393 };
394 394 };
395 395 authomatic = super.buildPythonPackage {
396 396 name = "authomatic-0.1.0.post1";
397 397 buildInputs = with self; [];
398 398 doCheck = false;
399 399 propagatedBuildInputs = with self; [];
400 400 src = fetchurl {
401 401 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
402 402 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
403 403 };
404 404 meta = {
405 405 license = [ pkgs.lib.licenses.mit ];
406 406 };
407 407 };
408 - backport-ipaddress = super.buildPythonPackage {
409 - name = "backport-ipaddress-0.1";
410 - buildInputs = with self; [];
411 - doCheck = false;
412 - propagatedBuildInputs = with self; [];
413 - src = fetchurl {
414 - url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
415 - md5 = "9c1f45f4361f71b124d7293a60006c05";
416 - };
417 - meta = {
418 - license = [ pkgs.lib.licenses.psfl ];
419 - };
420 - };
421 408 backports.shutil-get-terminal-size = super.buildPythonPackage {
422 409 name = "backports.shutil-get-terminal-size-1.0.0";
423 410 buildInputs = with self; [];
424 411 doCheck = false;
425 412 propagatedBuildInputs = with self; [];
426 413 src = fetchurl {
427 414 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
428 415 md5 = "03267762480bd86b50580dc19dff3c66";
429 416 };
430 417 meta = {
431 418 license = [ pkgs.lib.licenses.mit ];
432 419 };
433 420 };
434 421 beautifulsoup4 = super.buildPythonPackage {
435 422 name = "beautifulsoup4-4.6.0";
436 423 buildInputs = with self; [];
437 424 doCheck = false;
438 425 propagatedBuildInputs = with self; [];
439 426 src = fetchurl {
440 427 url = "https://pypi.python.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
441 428 md5 = "c17714d0f91a23b708a592cb3c697728";
442 429 };
443 430 meta = {
444 431 license = [ pkgs.lib.licenses.mit ];
445 432 };
446 433 };
447 434 bleach = super.buildPythonPackage {
448 435 name = "bleach-1.5.0";
449 436 buildInputs = with self; [];
450 437 doCheck = false;
451 438 propagatedBuildInputs = with self; [six html5lib];
452 439 src = fetchurl {
453 440 url = "https://pypi.python.org/packages/99/00/25a8fce4de102bf6e3cc76bc4ea60685b2fee33bde1b34830c70cacc26a7/bleach-1.5.0.tar.gz";
454 441 md5 = "b663300efdf421b3b727b19d7be9c7e7";
455 442 };
456 443 meta = {
457 444 license = [ pkgs.lib.licenses.asl20 ];
458 445 };
459 446 };
460 447 bottle = super.buildPythonPackage {
461 448 name = "bottle-0.12.8";
462 449 buildInputs = with self; [];
463 450 doCheck = false;
464 451 propagatedBuildInputs = with self; [];
465 452 src = fetchurl {
466 453 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
467 454 md5 = "13132c0a8f607bf860810a6ee9064c5b";
468 455 };
469 456 meta = {
470 457 license = [ pkgs.lib.licenses.mit ];
471 458 };
472 459 };
473 460 bumpversion = super.buildPythonPackage {
474 461 name = "bumpversion-0.5.3";
475 462 buildInputs = with self; [];
476 463 doCheck = false;
477 464 propagatedBuildInputs = with self; [];
478 465 src = fetchurl {
479 466 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
480 467 md5 = "c66a3492eafcf5ad4b024be9fca29820";
481 468 };
482 469 meta = {
483 470 license = [ pkgs.lib.licenses.mit ];
484 471 };
485 472 };
486 473 celery = super.buildPythonPackage {
487 474 name = "celery-2.2.10";
488 475 buildInputs = with self; [];
489 476 doCheck = false;
490 477 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
491 478 src = fetchurl {
492 479 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
493 480 md5 = "898bc87e54f278055b561316ba73e222";
494 481 };
495 482 meta = {
496 483 license = [ pkgs.lib.licenses.bsdOriginal ];
497 484 };
498 485 };
499 486 channelstream = super.buildPythonPackage {
500 487 name = "channelstream-0.5.2";
501 488 buildInputs = with self; [];
502 489 doCheck = false;
503 490 propagatedBuildInputs = with self; [gevent ws4py pyramid pyramid-jinja2 itsdangerous requests six];
504 491 src = fetchurl {
505 492 url = "https://pypi.python.org/packages/2b/31/29a8e085cf5bf97fa88e7b947adabfc581a18a3463adf77fb6dada34a65f/channelstream-0.5.2.tar.gz";
506 493 md5 = "1c5eb2a8a405be6f1073da94da6d81d3";
507 494 };
508 495 meta = {
509 496 license = [ pkgs.lib.licenses.bsdOriginal ];
510 497 };
511 498 };
512 499 click = super.buildPythonPackage {
513 500 name = "click-5.1";
514 501 buildInputs = with self; [];
515 502 doCheck = false;
516 503 propagatedBuildInputs = with self; [];
517 504 src = fetchurl {
518 505 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
519 506 md5 = "9c5323008cccfe232a8b161fc8196d41";
520 507 };
521 508 meta = {
522 509 license = [ pkgs.lib.licenses.bsdOriginal ];
523 510 };
524 511 };
525 512 colander = super.buildPythonPackage {
526 513 name = "colander-1.3.3";
527 514 buildInputs = with self; [];
528 515 doCheck = false;
529 516 propagatedBuildInputs = with self; [translationstring iso8601];
530 517 src = fetchurl {
531 518 url = "https://pypi.python.org/packages/54/a9/9862a561e015b2c7b56404c0b13828a8bdc51e05ab3703bd792cec064487/colander-1.3.3.tar.gz";
532 519 md5 = "f5d783768c51d73695f49bbe95778ab4";
533 520 };
534 521 meta = {
535 522 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
536 523 };
537 524 };
538 525 configobj = super.buildPythonPackage {
539 526 name = "configobj-5.0.6";
540 527 buildInputs = with self; [];
541 528 doCheck = false;
542 529 propagatedBuildInputs = with self; [six];
543 530 src = fetchurl {
544 531 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
545 532 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
546 533 };
547 534 meta = {
548 535 license = [ pkgs.lib.licenses.bsdOriginal ];
549 536 };
550 537 };
551 538 configparser = super.buildPythonPackage {
552 539 name = "configparser-3.5.0";
553 540 buildInputs = with self; [];
554 541 doCheck = false;
555 542 propagatedBuildInputs = with self; [];
556 543 src = fetchurl {
557 544 url = "https://pypi.python.org/packages/7c/69/c2ce7e91c89dc073eb1aa74c0621c3eefbffe8216b3f9af9d3885265c01c/configparser-3.5.0.tar.gz";
558 545 md5 = "cfdd915a5b7a6c09917a64a573140538";
559 546 };
560 547 meta = {
561 548 license = [ pkgs.lib.licenses.mit ];
562 549 };
563 550 };
564 551 cov-core = super.buildPythonPackage {
565 552 name = "cov-core-1.15.0";
566 553 buildInputs = with self; [];
567 554 doCheck = false;
568 555 propagatedBuildInputs = with self; [coverage];
569 556 src = fetchurl {
570 557 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
571 558 md5 = "f519d4cb4c4e52856afb14af52919fe6";
572 559 };
573 560 meta = {
574 561 license = [ pkgs.lib.licenses.mit ];
575 562 };
576 563 };
577 564 coverage = super.buildPythonPackage {
578 565 name = "coverage-3.7.1";
579 566 buildInputs = with self; [];
580 567 doCheck = false;
581 568 propagatedBuildInputs = with self; [];
582 569 src = fetchurl {
583 570 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
584 571 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
585 572 };
586 573 meta = {
587 574 license = [ pkgs.lib.licenses.bsdOriginal ];
588 575 };
589 576 };
590 577 cssselect = super.buildPythonPackage {
591 578 name = "cssselect-1.0.1";
592 579 buildInputs = with self; [];
593 580 doCheck = false;
594 581 propagatedBuildInputs = with self; [];
595 582 src = fetchurl {
596 583 url = "https://pypi.python.org/packages/77/ff/9c865275cd19290feba56344eba570e719efb7ca5b34d67ed12b22ebbb0d/cssselect-1.0.1.tar.gz";
597 584 md5 = "3fa03bf82a9f0b1223c0f1eb1369e139";
598 585 };
599 586 meta = {
600 587 license = [ pkgs.lib.licenses.bsdOriginal ];
601 588 };
602 589 };
603 590 decorator = super.buildPythonPackage {
604 591 name = "decorator-4.0.11";
605 592 buildInputs = with self; [];
606 593 doCheck = false;
607 594 propagatedBuildInputs = with self; [];
608 595 src = fetchurl {
609 596 url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz";
610 597 md5 = "73644c8f0bd4983d1b6a34b49adec0ae";
611 598 };
612 599 meta = {
613 600 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
614 601 };
615 602 };
616 603 deform = super.buildPythonPackage {
617 604 name = "deform-2.0.4";
618 605 buildInputs = with self; [];
619 606 doCheck = false;
620 607 propagatedBuildInputs = with self; [Chameleon colander iso8601 peppercorn translationstring zope.deprecation];
621 608 src = fetchurl {
622 609 url = "https://pypi.python.org/packages/66/3b/eefcb07abcab7a97f6665aa2d0cf1af741d9d6e78a2e4657fd2b89f89880/deform-2.0.4.tar.gz";
623 610 md5 = "34756e42cf50dd4b4430809116c4ec0a";
624 611 };
625 612 meta = {
626 613 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
627 614 };
628 615 };
629 616 docutils = super.buildPythonPackage {
630 617 name = "docutils-0.13.1";
631 618 buildInputs = with self; [];
632 619 doCheck = false;
633 620 propagatedBuildInputs = with self; [];
634 621 src = fetchurl {
635 622 url = "https://pypi.python.org/packages/05/25/7b5484aca5d46915493f1fd4ecb63c38c333bd32aa9ad6e19da8d08895ae/docutils-0.13.1.tar.gz";
636 623 md5 = "ea4a893c633c788be9b8078b6b305d53";
637 624 };
638 625 meta = {
639 626 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
640 627 };
641 628 };
642 629 dogpile.cache = super.buildPythonPackage {
643 630 name = "dogpile.cache-0.6.4";
644 631 buildInputs = with self; [];
645 632 doCheck = false;
646 633 propagatedBuildInputs = with self; [];
647 634 src = fetchurl {
648 635 url = "https://pypi.python.org/packages/b6/3d/35c05ca01c070bb70d9d422f2c4858ecb021b05b21af438fec5ccd7b945c/dogpile.cache-0.6.4.tar.gz";
649 636 md5 = "66e0a6cae6c08cb1ea25f89d0eadfeb0";
650 637 };
651 638 meta = {
652 639 license = [ pkgs.lib.licenses.bsdOriginal ];
653 640 };
654 641 };
655 642 dogpile.core = super.buildPythonPackage {
656 643 name = "dogpile.core-0.4.1";
657 644 buildInputs = with self; [];
658 645 doCheck = false;
659 646 propagatedBuildInputs = with self; [];
660 647 src = fetchurl {
661 648 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
662 649 md5 = "01cb19f52bba3e95c9b560f39341f045";
663 650 };
664 651 meta = {
665 652 license = [ pkgs.lib.licenses.bsdOriginal ];
666 653 };
667 654 };
668 655 ecdsa = super.buildPythonPackage {
669 656 name = "ecdsa-0.11";
670 657 buildInputs = with self; [];
671 658 doCheck = false;
672 659 propagatedBuildInputs = with self; [];
673 660 src = fetchurl {
674 661 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
675 662 md5 = "8ef586fe4dbb156697d756900cb41d7c";
676 663 };
677 664 meta = {
678 665 license = [ pkgs.lib.licenses.mit ];
679 666 };
680 667 };
681 668 elasticsearch = super.buildPythonPackage {
682 669 name = "elasticsearch-2.3.0";
683 670 buildInputs = with self; [];
684 671 doCheck = false;
685 672 propagatedBuildInputs = with self; [urllib3];
686 673 src = fetchurl {
687 674 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
688 675 md5 = "2550f3b51629cf1ef9636608af92c340";
689 676 };
690 677 meta = {
691 678 license = [ pkgs.lib.licenses.asl20 ];
692 679 };
693 680 };
694 681 elasticsearch-dsl = super.buildPythonPackage {
695 682 name = "elasticsearch-dsl-2.2.0";
696 683 buildInputs = with self; [];
697 684 doCheck = false;
698 685 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
699 686 src = fetchurl {
700 687 url = "https://pypi.python.org/packages/66/2f/52a086968788e58461641570f45c3207a52d46ebbe9b77dc22b6a8ffda66/elasticsearch-dsl-2.2.0.tar.gz";
701 688 md5 = "fa6bd3c87ea3caa8f0f051bc37c53221";
702 689 };
703 690 meta = {
704 691 license = [ pkgs.lib.licenses.asl20 ];
705 692 };
706 693 };
707 694 entrypoints = super.buildPythonPackage {
708 695 name = "entrypoints-0.2.2";
709 696 buildInputs = with self; [];
710 697 doCheck = false;
711 698 propagatedBuildInputs = with self; [configparser];
712 699 src = fetchurl {
713 700 url = "https://code.rhodecode.com/upstream/entrypoints/archive/96e6d645684e1af3d7df5b5272f3fe85a546b233.tar.gz?md5=7db37771aea9ac9fefe093e5d6987313";
714 701 md5 = "7db37771aea9ac9fefe093e5d6987313";
715 702 };
716 703 meta = {
717 704 license = [ pkgs.lib.licenses.mit ];
718 705 };
719 706 };
720 707 enum34 = super.buildPythonPackage {
721 708 name = "enum34-1.1.6";
722 709 buildInputs = with self; [];
723 710 doCheck = false;
724 711 propagatedBuildInputs = with self; [];
725 712 src = fetchurl {
726 713 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
727 714 md5 = "5f13a0841a61f7fc295c514490d120d0";
728 715 };
729 716 meta = {
730 717 license = [ pkgs.lib.licenses.bsdOriginal ];
731 718 };
732 719 };
733 720 funcsigs = super.buildPythonPackage {
734 721 name = "funcsigs-1.0.2";
735 722 buildInputs = with self; [];
736 723 doCheck = false;
737 724 propagatedBuildInputs = with self; [];
738 725 src = fetchurl {
739 726 url = "https://pypi.python.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
740 727 md5 = "7e583285b1fb8a76305d6d68f4ccc14e";
741 728 };
742 729 meta = {
743 730 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
744 731 };
745 732 };
746 733 functools32 = super.buildPythonPackage {
747 734 name = "functools32-3.2.3.post2";
748 735 buildInputs = with self; [];
749 736 doCheck = false;
750 737 propagatedBuildInputs = with self; [];
751 738 src = fetchurl {
752 739 url = "https://pypi.python.org/packages/5e/1a/0aa2c8195a204a9f51284018562dea77e25511f02fe924fac202fc012172/functools32-3.2.3-2.zip";
753 740 md5 = "d55232eb132ec779e6893c902a0bc5ad";
754 741 };
755 742 meta = {
756 743 license = [ pkgs.lib.licenses.psfl ];
757 744 };
758 745 };
759 746 future = super.buildPythonPackage {
760 747 name = "future-0.14.3";
761 748 buildInputs = with self; [];
762 749 doCheck = false;
763 750 propagatedBuildInputs = with self; [];
764 751 src = fetchurl {
765 752 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
766 753 md5 = "e94079b0bd1fc054929e8769fc0f6083";
767 754 };
768 755 meta = {
769 756 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
770 757 };
771 758 };
772 759 futures = super.buildPythonPackage {
773 760 name = "futures-3.0.2";
774 761 buildInputs = with self; [];
775 762 doCheck = false;
776 763 propagatedBuildInputs = with self; [];
777 764 src = fetchurl {
778 765 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
779 766 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
780 767 };
781 768 meta = {
782 769 license = [ pkgs.lib.licenses.bsdOriginal ];
783 770 };
784 771 };
785 772 gevent = super.buildPythonPackage {
786 773 name = "gevent-1.2.2";
787 774 buildInputs = with self; [];
788 775 doCheck = false;
789 776 propagatedBuildInputs = with self; [greenlet];
790 777 src = fetchurl {
791 778 url = "https://pypi.python.org/packages/1b/92/b111f76e54d2be11375b47b213b56687214f258fd9dae703546d30b837be/gevent-1.2.2.tar.gz";
792 779 md5 = "7f0baf355384fe5ff2ecf66853422554";
793 780 };
794 781 meta = {
795 782 license = [ pkgs.lib.licenses.mit ];
796 783 };
797 784 };
798 785 gnureadline = super.buildPythonPackage {
799 786 name = "gnureadline-6.3.3";
800 787 buildInputs = with self; [];
801 788 doCheck = false;
802 789 propagatedBuildInputs = with self; [];
803 790 src = fetchurl {
804 791 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
805 792 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
806 793 };
807 794 meta = {
808 795 license = [ pkgs.lib.licenses.gpl1 ];
809 796 };
810 797 };
811 798 gprof2dot = super.buildPythonPackage {
812 799 name = "gprof2dot-2016.10.13";
813 800 buildInputs = with self; [];
814 801 doCheck = false;
815 802 propagatedBuildInputs = with self; [];
816 803 src = fetchurl {
817 804 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
818 805 md5 = "0125401f15fd2afe1df686a76c64a4fd";
819 806 };
820 807 meta = {
821 808 license = [ { fullName = "LGPL"; } ];
822 809 };
823 810 };
824 811 graphviz = super.buildPythonPackage {
825 - name = "graphviz-0.7.1";
812 + name = "graphviz-0.8";
826 813 buildInputs = with self; [];
827 814 doCheck = false;
828 815 propagatedBuildInputs = with self; [];
829 816 src = fetchurl {
830 - url = "https://pypi.python.org/packages/7d/2d/f5cfa56467ca5a65eb44e1103d89d2f65dbc4f04cf7a1f3d38e973c3d1a8/graphviz-0.7.1.zip";
831 - md5 = "d5926e89975121d56dec777a79bfc9d1";
817 + url = "https://pypi.python.org/packages/da/84/0e997520323d6b01124eb01c68d5c101814d0aab53083cd62bd75a90f70b/graphviz-0.8.zip";
818 + md5 = "9486a885360a5ee54a81eb2950470c71";
832 819 };
833 820 meta = {
834 821 license = [ pkgs.lib.licenses.mit ];
835 822 };
836 823 };
837 824 greenlet = super.buildPythonPackage {
838 825 name = "greenlet-0.4.12";
839 826 buildInputs = with self; [];
840 827 doCheck = false;
841 828 propagatedBuildInputs = with self; [];
842 829 src = fetchurl {
843 830 url = "https://pypi.python.org/packages/be/76/82af375d98724054b7e273b5d9369346937324f9bcc20980b45b068ef0b0/greenlet-0.4.12.tar.gz";
844 831 md5 = "e8637647d58a26c4a1f51ca393e53c00";
845 832 };
846 833 meta = {
847 834 license = [ pkgs.lib.licenses.mit ];
848 835 };
849 836 };
850 837 gunicorn = super.buildPythonPackage {
851 838 name = "gunicorn-19.7.1";
852 839 buildInputs = with self; [];
853 840 doCheck = false;
854 841 propagatedBuildInputs = with self; [];
855 842 src = fetchurl {
856 843 url = "https://pypi.python.org/packages/30/3a/10bb213cede0cc4d13ac2263316c872a64bf4c819000c8ccd801f1d5f822/gunicorn-19.7.1.tar.gz";
857 844 md5 = "174d3c3cd670a5be0404d84c484e590c";
858 845 };
859 846 meta = {
860 847 license = [ pkgs.lib.licenses.mit ];
861 848 };
862 849 };
863 850 html5lib = super.buildPythonPackage {
864 851 name = "html5lib-0.9999999";
865 852 buildInputs = with self; [];
866 853 doCheck = false;
867 854 propagatedBuildInputs = with self; [six];
868 855 src = fetchurl {
869 856 url = "https://pypi.python.org/packages/ae/ae/bcb60402c60932b32dfaf19bb53870b29eda2cd17551ba5639219fb5ebf9/html5lib-0.9999999.tar.gz";
870 857 md5 = "ef43cb05e9e799f25d65d1135838a96f";
871 858 };
872 859 meta = {
873 860 license = [ pkgs.lib.licenses.mit ];
874 861 };
875 862 };
876 863 infrae.cache = super.buildPythonPackage {
877 864 name = "infrae.cache-1.0.1";
878 865 buildInputs = with self; [];
879 866 doCheck = false;
880 867 propagatedBuildInputs = with self; [Beaker repoze.lru];
881 868 src = fetchurl {
882 869 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
883 870 md5 = "b09076a766747e6ed2a755cc62088e32";
884 871 };
885 872 meta = {
886 873 license = [ pkgs.lib.licenses.zpt21 ];
887 874 };
888 875 };
889 876 invoke = super.buildPythonPackage {
890 877 name = "invoke-0.13.0";
891 878 buildInputs = with self; [];
892 879 doCheck = false;
893 880 propagatedBuildInputs = with self; [];
894 881 src = fetchurl {
895 882 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
896 883 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
897 884 };
898 885 meta = {
899 886 license = [ pkgs.lib.licenses.bsdOriginal ];
900 887 };
901 888 };
889 + ipaddress = super.buildPythonPackage {
890 + name = "ipaddress-1.0.18";
891 + buildInputs = with self; [];
892 + doCheck = false;
893 + propagatedBuildInputs = with self; [];
894 + src = fetchurl {
895 + url = "https://pypi.python.org/packages/4e/13/774faf38b445d0b3a844b65747175b2e0500164b7c28d78e34987a5bfe06/ipaddress-1.0.18.tar.gz";
896 + md5 = "310c2dfd64eb6f0df44aa8c59f2334a7";
897 + };
898 + meta = {
899 + license = [ pkgs.lib.licenses.psfl ];
900 + };
901 + };
902 902 ipdb = super.buildPythonPackage {
903 903 name = "ipdb-0.10.3";
904 904 buildInputs = with self; [];
905 905 doCheck = false;
906 906 propagatedBuildInputs = with self; [setuptools ipython];
907 907 src = fetchurl {
908 908 url = "https://pypi.python.org/packages/ad/cc/0e7298e1fbf2efd52667c9354a12aa69fb6f796ce230cca03525051718ef/ipdb-0.10.3.tar.gz";
909 909 md5 = "def1f6ac075d54bdee07e6501263d4fa";
910 910 };
911 911 meta = {
912 912 license = [ pkgs.lib.licenses.bsdOriginal ];
913 913 };
914 914 };
915 915 ipython = super.buildPythonPackage {
916 916 name = "ipython-5.1.0";
917 917 buildInputs = with self; [];
918 918 doCheck = false;
919 919 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit Pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
920 920 src = fetchurl {
921 921 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
922 922 md5 = "47c8122420f65b58784cb4b9b4af35e3";
923 923 };
924 924 meta = {
925 925 license = [ pkgs.lib.licenses.bsdOriginal ];
926 926 };
927 927 };
928 928 ipython-genutils = super.buildPythonPackage {
929 929 name = "ipython-genutils-0.2.0";
930 930 buildInputs = with self; [];
931 931 doCheck = false;
932 932 propagatedBuildInputs = with self; [];
933 933 src = fetchurl {
934 934 url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
935 935 md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
936 936 };
937 937 meta = {
938 938 license = [ pkgs.lib.licenses.bsdOriginal ];
939 939 };
940 940 };
941 941 iso8601 = super.buildPythonPackage {
942 942 name = "iso8601-0.1.11";
943 943 buildInputs = with self; [];
944 944 doCheck = false;
945 945 propagatedBuildInputs = with self; [];
946 946 src = fetchurl {
947 947 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
948 948 md5 = "b06d11cd14a64096f907086044f0fe38";
949 949 };
950 950 meta = {
951 951 license = [ pkgs.lib.licenses.mit ];
952 952 };
953 953 };
954 954 itsdangerous = super.buildPythonPackage {
955 955 name = "itsdangerous-0.24";
956 956 buildInputs = with self; [];
957 957 doCheck = false;
958 958 propagatedBuildInputs = with self; [];
959 959 src = fetchurl {
960 960 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
961 961 md5 = "a3d55aa79369aef5345c036a8a26307f";
962 962 };
963 963 meta = {
964 964 license = [ pkgs.lib.licenses.bsdOriginal ];
965 965 };
966 966 };
967 967 jsonschema = super.buildPythonPackage {
968 968 name = "jsonschema-2.6.0";
969 969 buildInputs = with self; [];
970 970 doCheck = false;
971 971 propagatedBuildInputs = with self; [functools32];
972 972 src = fetchurl {
973 973 url = "https://pypi.python.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
974 974 md5 = "50c6b69a373a8b55ff1e0ec6e78f13f4";
975 975 };
976 976 meta = {
977 977 license = [ pkgs.lib.licenses.mit ];
978 978 };
979 979 };
980 980 jupyter-client = super.buildPythonPackage {
981 981 name = "jupyter-client-5.0.0";
982 982 buildInputs = with self; [];
983 983 doCheck = false;
984 984 propagatedBuildInputs = with self; [traitlets jupyter-core pyzmq python-dateutil];
985 985 src = fetchurl {
986 986 url = "https://pypi.python.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
987 987 md5 = "1acd331b5c9fb4d79dae9939e79f2426";
988 988 };
989 989 meta = {
990 990 license = [ pkgs.lib.licenses.bsdOriginal ];
991 991 };
992 992 };
993 993 jupyter-core = super.buildPythonPackage {
994 994 name = "jupyter-core-4.3.0";
995 995 buildInputs = with self; [];
996 996 doCheck = false;
997 997 propagatedBuildInputs = with self; [traitlets];
998 998 src = fetchurl {
999 999 url = "https://pypi.python.org/packages/2f/39/5138f975100ce14d150938df48a83cd852a3fd8e24b1244f4113848e69e2/jupyter_core-4.3.0.tar.gz";
1000 1000 md5 = "18819511a809afdeed9a995a9c27bcfb";
1001 1001 };
1002 1002 meta = {
1003 1003 license = [ pkgs.lib.licenses.bsdOriginal ];
1004 1004 };
1005 1005 };
1006 1006 hupper = super.buildPythonPackage {
1007 1007 name = "hupper-1.0";
1008 1008 buildInputs = with self; [];
1009 1009 doCheck = false;
1010 1010 propagatedBuildInputs = with self; [];
1011 1011 src = fetchurl {
1012 1012 url = "https://pypi.python.org/packages/2e/07/df892c564dc09bb3cf6f6deb976c26adf9117db75ba218cb4353dbc9d826/hupper-1.0.tar.gz";
1013 1013 md5 = "26e77da7d5ac5858f59af050d1a6eb5a";
1014 1014 };
1015 1015 meta = {
1016 1016 license = [ pkgs.lib.licenses.mit ];
1017 1017 };
1018 1018 };
1019 1019 kombu = super.buildPythonPackage {
1020 1020 name = "kombu-1.5.1";
1021 1021 buildInputs = with self; [];
1022 1022 doCheck = false;
1023 1023 propagatedBuildInputs = with self; [anyjson amqplib];
1024 1024 src = fetchurl {
1025 1025 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
1026 1026 md5 = "50662f3c7e9395b3d0721fb75d100b63";
1027 1027 };
1028 1028 meta = {
1029 1029 license = [ pkgs.lib.licenses.bsdOriginal ];
1030 1030 };
1031 1031 };
1032 1032 lxml = super.buildPythonPackage {
1033 1033 name = "lxml-3.7.3";
1034 1034 buildInputs = with self; [];
1035 1035 doCheck = false;
1036 1036 propagatedBuildInputs = with self; [];
1037 1037 src = fetchurl {
1038 1038 url = "https://pypi.python.org/packages/39/e8/a8e0b1fa65dd021d48fe21464f71783655f39a41f218293c1c590d54eb82/lxml-3.7.3.tar.gz";
1039 1039 md5 = "075692ce442e69bbd604d44e21c02753";
1040 1040 };
1041 1041 meta = {
1042 1042 license = [ pkgs.lib.licenses.bsdOriginal ];
1043 1043 };
1044 1044 };
1045 1045 meld3 = super.buildPythonPackage {
1046 1046 name = "meld3-1.0.2";
1047 1047 buildInputs = with self; [];
1048 1048 doCheck = false;
1049 1049 propagatedBuildInputs = with self; [];
1050 1050 src = fetchurl {
1051 1051 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
1052 1052 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
1053 1053 };
1054 1054 meta = {
1055 1055 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1056 1056 };
1057 1057 };
1058 1058 mistune = super.buildPythonPackage {
1059 1059 name = "mistune-0.7.4";
1060 1060 buildInputs = with self; [];
1061 1061 doCheck = false;
1062 1062 propagatedBuildInputs = with self; [];
1063 1063 src = fetchurl {
1064 1064 url = "https://pypi.python.org/packages/25/a4/12a584c0c59c9fed529f8b3c47ca8217c0cf8bcc5e1089d3256410cfbdbc/mistune-0.7.4.tar.gz";
1065 1065 md5 = "92d01cb717e9e74429e9bde9d29ac43b";
1066 1066 };
1067 1067 meta = {
1068 1068 license = [ pkgs.lib.licenses.bsdOriginal ];
1069 1069 };
1070 1070 };
1071 1071 mock = super.buildPythonPackage {
1072 1072 name = "mock-1.0.1";
1073 1073 buildInputs = with self; [];
1074 1074 doCheck = false;
1075 1075 propagatedBuildInputs = with self; [];
1076 1076 src = fetchurl {
1077 1077 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
1078 1078 md5 = "869f08d003c289a97c1a6610faf5e913";
1079 1079 };
1080 1080 meta = {
1081 1081 license = [ pkgs.lib.licenses.bsdOriginal ];
1082 1082 };
1083 1083 };
1084 1084 msgpack-python = super.buildPythonPackage {
1085 1085 name = "msgpack-python-0.4.8";
1086 1086 buildInputs = with self; [];
1087 1087 doCheck = false;
1088 1088 propagatedBuildInputs = with self; [];
1089 1089 src = fetchurl {
1090 1090 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
1091 1091 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
1092 1092 };
1093 1093 meta = {
1094 1094 license = [ pkgs.lib.licenses.asl20 ];
1095 1095 };
1096 1096 };
1097 1097 nbconvert = super.buildPythonPackage {
1098 1098 name = "nbconvert-5.1.1";
1099 1099 buildInputs = with self; [];
1100 1100 doCheck = false;
1101 1101 propagatedBuildInputs = with self; [mistune Jinja2 Pygments traitlets jupyter-core nbformat entrypoints bleach pandocfilters testpath];
1102 1102 src = fetchurl {
1103 1103 url = "https://pypi.python.org/packages/95/58/df1c91f1658ee5df19097f915a1e71c91fc824a708d82d2b2e35f8b80e9a/nbconvert-5.1.1.tar.gz";
1104 1104 md5 = "d0263fb03a44db2f94eea09a608ed813";
1105 1105 };
1106 1106 meta = {
1107 1107 license = [ pkgs.lib.licenses.bsdOriginal ];
1108 1108 };
1109 1109 };
1110 1110 nbformat = super.buildPythonPackage {
1111 1111 name = "nbformat-4.3.0";
1112 1112 buildInputs = with self; [];
1113 1113 doCheck = false;
1114 1114 propagatedBuildInputs = with self; [ipython-genutils traitlets jsonschema jupyter-core];
1115 1115 src = fetchurl {
1116 1116 url = "https://pypi.python.org/packages/f9/c5/89df4abf906f766727f976e170caa85b4f1c1d1feb1f45d716016e68e19f/nbformat-4.3.0.tar.gz";
1117 1117 md5 = "9a00d20425914cd5ba5f97769d9963ca";
1118 1118 };
1119 1119 meta = {
1120 1120 license = [ pkgs.lib.licenses.bsdOriginal ];
1121 1121 };
1122 1122 };
1123 1123 nose = super.buildPythonPackage {
1124 1124 name = "nose-1.3.6";
1125 1125 buildInputs = with self; [];
1126 1126 doCheck = false;
1127 1127 propagatedBuildInputs = with self; [];
1128 1128 src = fetchurl {
1129 1129 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
1130 1130 md5 = "0ca546d81ca8309080fc80cb389e7a16";
1131 1131 };
1132 1132 meta = {
1133 1133 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
1134 1134 };
1135 1135 };
1136 1136 objgraph = super.buildPythonPackage {
1137 1137 name = "objgraph-3.1.0";
1138 1138 buildInputs = with self; [];
1139 1139 doCheck = false;
1140 1140 propagatedBuildInputs = with self; [graphviz];
1141 1141 src = fetchurl {
1142 1142 url = "https://pypi.python.org/packages/f4/b3/082e54e62094cb2ec84f8d5a49e0142cef99016491cecba83309cff920ae/objgraph-3.1.0.tar.gz";
1143 1143 md5 = "eddbd96039796bfbd13eee403701e64a";
1144 1144 };
1145 1145 meta = {
1146 1146 license = [ pkgs.lib.licenses.mit ];
1147 1147 };
1148 1148 };
1149 1149 packaging = super.buildPythonPackage {
1150 1150 name = "packaging-15.2";
1151 1151 buildInputs = with self; [];
1152 1152 doCheck = false;
1153 1153 propagatedBuildInputs = with self; [];
1154 1154 src = fetchurl {
1155 1155 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
1156 1156 md5 = "c16093476f6ced42128bf610e5db3784";
1157 1157 };
1158 1158 meta = {
1159 1159 license = [ pkgs.lib.licenses.asl20 ];
1160 1160 };
1161 1161 };
1162 1162 pandocfilters = super.buildPythonPackage {
1163 1163 name = "pandocfilters-1.4.1";
1164 1164 buildInputs = with self; [];
1165 1165 doCheck = false;
1166 1166 propagatedBuildInputs = with self; [];
1167 1167 src = fetchurl {
1168 1168 url = "https://pypi.python.org/packages/e3/1f/21d1b7e8ca571e80b796c758d361fdf5554335ff138158654684bc5401d8/pandocfilters-1.4.1.tar.gz";
1169 1169 md5 = "7680d9f9ec07397dd17f380ee3818b9d";
1170 1170 };
1171 1171 meta = {
1172 1172 license = [ pkgs.lib.licenses.bsdOriginal ];
1173 1173 };
1174 1174 };
1175 1175 paramiko = super.buildPythonPackage {
1176 1176 name = "paramiko-1.15.1";
1177 1177 buildInputs = with self; [];
1178 1178 doCheck = false;
1179 1179 propagatedBuildInputs = with self; [pycrypto ecdsa];
1180 1180 src = fetchurl {
1181 1181 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
1182 1182 md5 = "48c274c3f9b1282932567b21f6acf3b5";
1183 1183 };
1184 1184 meta = {
1185 1185 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1186 1186 };
1187 1187 };
1188 1188 pathlib2 = super.buildPythonPackage {
1189 1189 name = "pathlib2-2.3.0";
1190 1190 buildInputs = with self; [];
1191 1191 doCheck = false;
1192 1192 propagatedBuildInputs = with self; [six scandir];
1193 1193 src = fetchurl {
1194 1194 url = "https://pypi.python.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
1195 1195 md5 = "89c90409d11fd5947966b6a30a47d18c";
1196 1196 };
1197 1197 meta = {
1198 1198 license = [ pkgs.lib.licenses.mit ];
1199 1199 };
1200 1200 };
1201 1201 peppercorn = super.buildPythonPackage {
1202 1202 name = "peppercorn-0.5";
1203 1203 buildInputs = with self; [];
1204 1204 doCheck = false;
1205 1205 propagatedBuildInputs = with self; [];
1206 1206 src = fetchurl {
1207 1207 url = "https://pypi.python.org/packages/45/ec/a62ec317d1324a01567c5221b420742f094f05ee48097e5157d32be3755c/peppercorn-0.5.tar.gz";
1208 1208 md5 = "f08efbca5790019ab45d76b7244abd40";
1209 1209 };
1210 1210 meta = {
1211 1211 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1212 1212 };
1213 1213 };
1214 1214 pexpect = super.buildPythonPackage {
1215 1215 name = "pexpect-4.2.1";
1216 1216 buildInputs = with self; [];
1217 1217 doCheck = false;
1218 1218 propagatedBuildInputs = with self; [ptyprocess];
1219 1219 src = fetchurl {
1220 1220 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
1221 1221 md5 = "3694410001a99dff83f0b500a1ca1c95";
1222 1222 };
1223 1223 meta = {
1224 1224 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1225 1225 };
1226 1226 };
1227 1227 pickleshare = super.buildPythonPackage {
1228 1228 name = "pickleshare-0.7.4";
1229 1229 buildInputs = with self; [];
1230 1230 doCheck = false;
1231 1231 propagatedBuildInputs = with self; [pathlib2];
1232 1232 src = fetchurl {
1233 1233 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
1234 1234 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
1235 1235 };
1236 1236 meta = {
1237 1237 license = [ pkgs.lib.licenses.mit ];
1238 1238 };
1239 1239 };
1240 1240 plaster = super.buildPythonPackage {
1241 1241 name = "plaster-0.5";
1242 1242 buildInputs = with self; [];
1243 1243 doCheck = false;
1244 1244 propagatedBuildInputs = with self; [setuptools];
1245 1245 src = fetchurl {
1246 1246 url = "https://pypi.python.org/packages/99/b3/d7ca1fe31d2b56dba68a238721fda6820770f9c2a3de17a582d4b5b2edcc/plaster-0.5.tar.gz";
1247 1247 md5 = "c59345a67a860cfcaa1bd6a81451399d";
1248 1248 };
1249 1249 meta = {
1250 1250 license = [ pkgs.lib.licenses.mit ];
1251 1251 };
1252 1252 };
1253 1253 plaster-pastedeploy = super.buildPythonPackage {
1254 1254 name = "plaster-pastedeploy-0.4.1";
1255 1255 buildInputs = with self; [];
1256 1256 doCheck = false;
1257 1257 propagatedBuildInputs = with self; [PasteDeploy plaster];
1258 1258 src = fetchurl {
1259 1259 url = "https://pypi.python.org/packages/9d/6e/f8be01ed41c94e6c54ac97cf2eb142a702aae0c8cce31c846f785e525b40/plaster_pastedeploy-0.4.1.tar.gz";
1260 1260 md5 = "f48d5344b922e56c4978eebf1cd2e0d3";
1261 1261 };
1262 1262 meta = {
1263 1263 license = [ pkgs.lib.licenses.mit ];
1264 1264 };
1265 1265 };
1266 1266 prompt-toolkit = super.buildPythonPackage {
1267 1267 name = "prompt-toolkit-1.0.14";
1268 1268 buildInputs = with self; [];
1269 1269 doCheck = false;
1270 1270 propagatedBuildInputs = with self; [six wcwidth];
1271 1271 src = fetchurl {
1272 1272 url = "https://pypi.python.org/packages/55/56/8c39509b614bda53e638b7500f12577d663ac1b868aef53426fc6a26c3f5/prompt_toolkit-1.0.14.tar.gz";
1273 1273 md5 = "f24061ae133ed32c6b764e92bd48c496";
1274 1274 };
1275 1275 meta = {
1276 1276 license = [ pkgs.lib.licenses.bsdOriginal ];
1277 1277 };
1278 1278 };
1279 1279 psutil = super.buildPythonPackage {
1280 1280 name = "psutil-4.3.1";
1281 1281 buildInputs = with self; [];
1282 1282 doCheck = false;
1283 1283 propagatedBuildInputs = with self; [];
1284 1284 src = fetchurl {
1285 1285 url = "https://pypi.python.org/packages/78/cc/f267a1371f229bf16db6a4e604428c3b032b823b83155bd33cef45e49a53/psutil-4.3.1.tar.gz";
1286 1286 md5 = "199a366dba829c88bddaf5b41d19ddc0";
1287 1287 };
1288 1288 meta = {
1289 1289 license = [ pkgs.lib.licenses.bsdOriginal ];
1290 1290 };
1291 1291 };
1292 1292 psycopg2 = super.buildPythonPackage {
1293 1293 name = "psycopg2-2.7.1";
1294 1294 buildInputs = with self; [];
1295 1295 doCheck = false;
1296 1296 propagatedBuildInputs = with self; [];
1297 1297 src = fetchurl {
1298 1298 url = "https://pypi.python.org/packages/f8/e9/5793369ce8a41bf5467623ded8d59a434dfef9c136351aca4e70c2657ba0/psycopg2-2.7.1.tar.gz";
1299 1299 md5 = "67848ac33af88336046802f6ef7081f3";
1300 1300 };
1301 1301 meta = {
1302 1302 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1303 1303 };
1304 1304 };
1305 1305 ptyprocess = super.buildPythonPackage {
1306 1306 name = "ptyprocess-0.5.2";
1307 1307 buildInputs = with self; [];
1308 1308 doCheck = false;
1309 1309 propagatedBuildInputs = with self; [];
1310 1310 src = fetchurl {
1311 1311 url = "https://pypi.python.org/packages/51/83/5d07dc35534640b06f9d9f1a1d2bc2513fb9cc7595a1b0e28ae5477056ce/ptyprocess-0.5.2.tar.gz";
1312 1312 md5 = "d3b8febae1b8c53b054bd818d0bb8665";
1313 1313 };
1314 1314 meta = {
1315 1315 license = [ ];
1316 1316 };
1317 1317 };
1318 1318 py = super.buildPythonPackage {
1319 1319 name = "py-1.4.34";
1320 1320 buildInputs = with self; [];
1321 1321 doCheck = false;
1322 1322 propagatedBuildInputs = with self; [];
1323 1323 src = fetchurl {
1324 1324 url = "https://pypi.python.org/packages/68/35/58572278f1c097b403879c1e9369069633d1cbad5239b9057944bb764782/py-1.4.34.tar.gz";
1325 1325 md5 = "d9c3d8f734b0819ff48e355d77bf1730";
1326 1326 };
1327 1327 meta = {
1328 1328 license = [ pkgs.lib.licenses.mit ];
1329 1329 };
1330 1330 };
1331 1331 py-bcrypt = super.buildPythonPackage {
1332 1332 name = "py-bcrypt-0.4";
1333 1333 buildInputs = with self; [];
1334 1334 doCheck = false;
1335 1335 propagatedBuildInputs = with self; [];
1336 1336 src = fetchurl {
1337 1337 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1338 1338 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
1339 1339 };
1340 1340 meta = {
1341 1341 license = [ pkgs.lib.licenses.bsdOriginal ];
1342 1342 };
1343 1343 };
1344 1344 py-gfm = super.buildPythonPackage {
1345 1345 name = "py-gfm-0.1.3";
1346 1346 buildInputs = with self; [];
1347 1347 doCheck = false;
1348 1348 propagatedBuildInputs = with self; [setuptools Markdown];
1349 1349 src = fetchurl {
1350 1350 url = "https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16";
1351 1351 md5 = "0d0d5385bfb629eea636a80b9c2bfd16";
1352 1352 };
1353 1353 meta = {
1354 1354 license = [ pkgs.lib.licenses.bsdOriginal ];
1355 1355 };
1356 1356 };
1357 1357 pycrypto = super.buildPythonPackage {
1358 1358 name = "pycrypto-2.6.1";
1359 1359 buildInputs = with self; [];
1360 1360 doCheck = false;
1361 1361 propagatedBuildInputs = with self; [];
1362 1362 src = fetchurl {
1363 1363 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1364 1364 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1365 1365 };
1366 1366 meta = {
1367 1367 license = [ pkgs.lib.licenses.publicDomain ];
1368 1368 };
1369 1369 };
1370 1370 pycurl = super.buildPythonPackage {
1371 1371 name = "pycurl-7.19.5";
1372 1372 buildInputs = with self; [];
1373 1373 doCheck = false;
1374 1374 propagatedBuildInputs = with self; [];
1375 1375 src = fetchurl {
1376 1376 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1377 1377 md5 = "47b4eac84118e2606658122104e62072";
1378 1378 };
1379 1379 meta = {
1380 1380 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1381 1381 };
1382 1382 };
1383 1383 pyflakes = super.buildPythonPackage {
1384 1384 name = "pyflakes-0.8.1";
1385 1385 buildInputs = with self; [];
1386 1386 doCheck = false;
1387 1387 propagatedBuildInputs = with self; [];
1388 1388 src = fetchurl {
1389 1389 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1390 1390 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1391 1391 };
1392 1392 meta = {
1393 1393 license = [ pkgs.lib.licenses.mit ];
1394 1394 };
1395 1395 };
1396 1396 pygments-markdown-lexer = super.buildPythonPackage {
1397 1397 name = "pygments-markdown-lexer-0.1.0.dev39";
1398 1398 buildInputs = with self; [];
1399 1399 doCheck = false;
1400 1400 propagatedBuildInputs = with self; [Pygments];
1401 1401 src = fetchurl {
1402 1402 url = "https://pypi.python.org/packages/c3/12/674cdee66635d638cedb2c5d9c85ce507b7b2f91bdba29e482f1b1160ff6/pygments-markdown-lexer-0.1.0.dev39.zip";
1403 1403 md5 = "6360fe0f6d1f896e35b7a0142ce6459c";
1404 1404 };
1405 1405 meta = {
1406 1406 license = [ pkgs.lib.licenses.asl20 ];
1407 1407 };
1408 1408 };
1409 1409 pyparsing = super.buildPythonPackage {
1410 1410 name = "pyparsing-1.5.7";
1411 1411 buildInputs = with self; [];
1412 1412 doCheck = false;
1413 1413 propagatedBuildInputs = with self; [];
1414 1414 src = fetchurl {
1415 1415 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1416 1416 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1417 1417 };
1418 1418 meta = {
1419 1419 license = [ pkgs.lib.licenses.mit ];
1420 1420 };
1421 1421 };
1422 1422 pyramid = super.buildPythonPackage {
1423 1423 name = "pyramid-1.9";
1424 1424 buildInputs = with self; [];
1425 1425 doCheck = false;
1426 1426 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy plaster plaster-pastedeploy hupper];
1427 1427 src = fetchurl {
1428 1428 url = "https://pypi.python.org/packages/b0/73/715321e129334f3e41430bede877620175a63ed075fd5d1fd2c25b7cb121/pyramid-1.9.tar.gz";
1429 1429 md5 = "aa6c7c568f83151af51eb053ac633bc4";
1430 1430 };
1431 1431 meta = {
1432 1432 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1433 1433 };
1434 1434 };
1435 1435 pyramid-beaker = super.buildPythonPackage {
1436 1436 name = "pyramid-beaker-0.8";
1437 1437 buildInputs = with self; [];
1438 1438 doCheck = false;
1439 1439 propagatedBuildInputs = with self; [pyramid Beaker];
1440 1440 src = fetchurl {
1441 1441 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1442 1442 md5 = "22f14be31b06549f80890e2c63a93834";
1443 1443 };
1444 1444 meta = {
1445 1445 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1446 1446 };
1447 1447 };
1448 1448 pyramid-debugtoolbar = super.buildPythonPackage {
1449 name = "pyramid-debugtoolbar-3.0.5";
1449 name = "pyramid-debugtoolbar-4.2.1";
1450 1450 buildInputs = with self; [];
1451 1451 doCheck = false;
1452 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1452 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments ipaddress];
1453 1453 src = fetchurl {
1454 url = "https://pypi.python.org/packages/64/0e/df00bfb55605900e7a2f7e4a18dd83575a6651688e297d5a0aa4c208fd7d/pyramid_debugtoolbar-3.0.5.tar.gz";
1455 md5 = "aebab8c3bfdc6f89e4d3adc1d126538e";
1454 url = "https://pypi.python.org/packages/db/26/94620b7752936e2cd74838263ff366db9b454f7394bfb62d1eb2f84b29c1/pyramid_debugtoolbar-4.2.1.tar.gz";
1455 md5 = "3dfaced2fab1644ff5284017be9d92b9";
1456 1456 };
1457 1457 meta = {
1458 1458 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1459 1459 };
1460 1460 };
1461 1461 pyramid-jinja2 = super.buildPythonPackage {
1462 1462 name = "pyramid-jinja2-2.5";
1463 1463 buildInputs = with self; [];
1464 1464 doCheck = false;
1465 1465 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1466 1466 src = fetchurl {
1467 1467 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1468 1468 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1469 1469 };
1470 1470 meta = {
1471 1471 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1472 1472 };
1473 1473 };
1474 1474 pyramid-mako = super.buildPythonPackage {
1475 1475 name = "pyramid-mako-1.0.2";
1476 1476 buildInputs = with self; [];
1477 1477 doCheck = false;
1478 1478 propagatedBuildInputs = with self; [pyramid Mako];
1479 1479 src = fetchurl {
1480 1480 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1481 1481 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1482 1482 };
1483 1483 meta = {
1484 1484 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1485 1485 };
1486 1486 };
1487 1487 pysqlite = super.buildPythonPackage {
1488 1488 name = "pysqlite-2.8.3";
1489 1489 buildInputs = with self; [];
1490 1490 doCheck = false;
1491 1491 propagatedBuildInputs = with self; [];
1492 1492 src = fetchurl {
1493 1493 url = "https://pypi.python.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1494 1494 md5 = "033f17b8644577715aee55e8832ac9fc";
1495 1495 };
1496 1496 meta = {
1497 1497 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1498 1498 };
1499 1499 };
1500 1500 pytest = super.buildPythonPackage {
1501 1501 name = "pytest-3.1.2";
1502 1502 buildInputs = with self; [];
1503 1503 doCheck = false;
1504 1504 propagatedBuildInputs = with self; [py setuptools];
1505 1505 src = fetchurl {
1506 1506 url = "https://pypi.python.org/packages/72/2b/2d3155e01f45a5a04427857352ee88220ee39550b2bc078f9db3190aea46/pytest-3.1.2.tar.gz";
1507 1507 md5 = "c4d179f89043cc925e1c169d03128e02";
1508 1508 };
1509 1509 meta = {
1510 1510 license = [ pkgs.lib.licenses.mit ];
1511 1511 };
1512 1512 };
1513 1513 pytest-catchlog = super.buildPythonPackage {
1514 1514 name = "pytest-catchlog-1.2.2";
1515 1515 buildInputs = with self; [];
1516 1516 doCheck = false;
1517 1517 propagatedBuildInputs = with self; [py pytest];
1518 1518 src = fetchurl {
1519 1519 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1520 1520 md5 = "09d890c54c7456c818102b7ff8c182c8";
1521 1521 };
1522 1522 meta = {
1523 1523 license = [ pkgs.lib.licenses.mit ];
1524 1524 };
1525 1525 };
1526 1526 pytest-cov = super.buildPythonPackage {
1527 1527 name = "pytest-cov-2.5.1";
1528 1528 buildInputs = with self; [];
1529 1529 doCheck = false;
1530 1530 propagatedBuildInputs = with self; [pytest coverage];
1531 1531 src = fetchurl {
1532 1532 url = "https://pypi.python.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
1533 1533 md5 = "5acf38d4909e19819eb5c1754fbfc0ac";
1534 1534 };
1535 1535 meta = {
1536 1536 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1537 1537 };
1538 1538 };
1539 1539 pytest-profiling = super.buildPythonPackage {
1540 1540 name = "pytest-profiling-1.2.6";
1541 1541 buildInputs = with self; [];
1542 1542 doCheck = false;
1543 1543 propagatedBuildInputs = with self; [six pytest gprof2dot];
1544 1544 src = fetchurl {
1545 1545 url = "https://pypi.python.org/packages/f9/0d/df67fb9ce16c2cef201693da956321b1bccfbf9a4ead39748b9f9d1d74cb/pytest-profiling-1.2.6.tar.gz";
1546 1546 md5 = "50eb4c66c3762a2f1a49669bedc0b894";
1547 1547 };
1548 1548 meta = {
1549 1549 license = [ pkgs.lib.licenses.mit ];
1550 1550 };
1551 1551 };
1552 1552 pytest-runner = super.buildPythonPackage {
1553 1553 name = "pytest-runner-2.11.1";
1554 1554 buildInputs = with self; [];
1555 1555 doCheck = false;
1556 1556 propagatedBuildInputs = with self; [];
1557 1557 src = fetchurl {
1558 1558 url = "https://pypi.python.org/packages/9e/4d/08889e5e27a9f5d6096b9ad257f4dea1faabb03c5ded8f665ead448f5d8a/pytest-runner-2.11.1.tar.gz";
1559 1559 md5 = "bdb73eb18eca2727944a2dcf963c5a81";
1560 1560 };
1561 1561 meta = {
1562 1562 license = [ pkgs.lib.licenses.mit ];
1563 1563 };
1564 1564 };
1565 1565 pytest-sugar = super.buildPythonPackage {
1566 1566 name = "pytest-sugar-0.8.0";
1567 1567 buildInputs = with self; [];
1568 1568 doCheck = false;
1569 1569 propagatedBuildInputs = with self; [pytest termcolor];
1570 1570 src = fetchurl {
1571 1571 url = "https://pypi.python.org/packages/a5/b0/b2773dee078f17773a5bf2dfad49b0be57b6354bbd84bbefe4313e509d87/pytest-sugar-0.8.0.tar.gz";
1572 1572 md5 = "8cafbdad648068e0e44b8fc5f9faae42";
1573 1573 };
1574 1574 meta = {
1575 1575 license = [ pkgs.lib.licenses.bsdOriginal ];
1576 1576 };
1577 1577 };
1578 1578 pytest-timeout = super.buildPythonPackage {
1579 1579 name = "pytest-timeout-1.2.0";
1580 1580 buildInputs = with self; [];
1581 1581 doCheck = false;
1582 1582 propagatedBuildInputs = with self; [pytest];
1583 1583 src = fetchurl {
1584 1584 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
1585 1585 md5 = "83607d91aa163562c7ee835da57d061d";
1586 1586 };
1587 1587 meta = {
1588 1588 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1589 1589 };
1590 1590 };
1591 1591 python-dateutil = super.buildPythonPackage {
1592 1592 name = "python-dateutil-2.1";
1593 1593 buildInputs = with self; [];
1594 1594 doCheck = false;
1595 1595 propagatedBuildInputs = with self; [six];
1596 1596 src = fetchurl {
1597 1597 url = "https://pypi.python.org/packages/65/52/9c18dac21f174ad31b65e22d24297864a954e6fe65876eba3f5773d2da43/python-dateutil-2.1.tar.gz";
1598 1598 md5 = "1534bb15cf311f07afaa3aacba1c028b";
1599 1599 };
1600 1600 meta = {
1601 1601 license = [ { fullName = "Simplified BSD"; } ];
1602 1602 };
1603 1603 };
1604 1604 python-editor = super.buildPythonPackage {
1605 1605 name = "python-editor-1.0.3";
1606 1606 buildInputs = with self; [];
1607 1607 doCheck = false;
1608 1608 propagatedBuildInputs = with self; [];
1609 1609 src = fetchurl {
1610 1610 url = "https://pypi.python.org/packages/65/1e/adf6e000ea5dc909aa420352d6ba37f16434c8a3c2fa030445411a1ed545/python-editor-1.0.3.tar.gz";
1611 1611 md5 = "0aca5f2ef176ce68e98a5b7e31372835";
1612 1612 };
1613 1613 meta = {
1614 1614 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1615 1615 };
1616 1616 };
1617 1617 python-ldap = super.buildPythonPackage {
1618 1618 name = "python-ldap-2.4.40";
1619 1619 buildInputs = with self; [];
1620 1620 doCheck = false;
1621 1621 propagatedBuildInputs = with self; [setuptools];
1622 1622 src = fetchurl {
1623 1623 url = "https://pypi.python.org/packages/4a/d8/7d70a7469058a3987d224061a81d778951ac2b48220bdcc511e4b1b37176/python-ldap-2.4.40.tar.gz";
1624 1624 md5 = "aea0233f7d39b0c7549fcd310deeb0e5";
1625 1625 };
1626 1626 meta = {
1627 1627 license = [ pkgs.lib.licenses.psfl ];
1628 1628 };
1629 1629 };
1630 1630 python-memcached = super.buildPythonPackage {
1631 1631 name = "python-memcached-1.58";
1632 1632 buildInputs = with self; [];
1633 1633 doCheck = false;
1634 1634 propagatedBuildInputs = with self; [six];
1635 1635 src = fetchurl {
1636 1636 url = "https://pypi.python.org/packages/f7/62/14b2448cfb04427366f24104c9da97cf8ea380d7258a3233f066a951a8d8/python-memcached-1.58.tar.gz";
1637 1637 md5 = "23b258105013d14d899828d334e6b044";
1638 1638 };
1639 1639 meta = {
1640 1640 license = [ pkgs.lib.licenses.psfl ];
1641 1641 };
1642 1642 };
1643 1643 python-pam = super.buildPythonPackage {
1644 1644 name = "python-pam-1.8.2";
1645 1645 buildInputs = with self; [];
1646 1646 doCheck = false;
1647 1647 propagatedBuildInputs = with self; [];
1648 1648 src = fetchurl {
1649 1649 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1650 1650 md5 = "db71b6b999246fb05d78ecfbe166629d";
1651 1651 };
1652 1652 meta = {
1653 1653 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1654 1654 };
1655 1655 };
1656 1656 pytz = super.buildPythonPackage {
1657 1657 name = "pytz-2015.4";
1658 1658 buildInputs = with self; [];
1659 1659 doCheck = false;
1660 1660 propagatedBuildInputs = with self; [];
1661 1661 src = fetchurl {
1662 1662 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1663 1663 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1664 1664 };
1665 1665 meta = {
1666 1666 license = [ pkgs.lib.licenses.mit ];
1667 1667 };
1668 1668 };
1669 1669 pyzmq = super.buildPythonPackage {
1670 1670 name = "pyzmq-14.6.0";
1671 1671 buildInputs = with self; [];
1672 1672 doCheck = false;
1673 1673 propagatedBuildInputs = with self; [];
1674 1674 src = fetchurl {
1675 1675 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1676 1676 md5 = "395b5de95a931afa5b14c9349a5b8024";
1677 1677 };
1678 1678 meta = {
1679 1679 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1680 1680 };
1681 1681 };
1682 1682 recaptcha-client = super.buildPythonPackage {
1683 1683 name = "recaptcha-client-1.0.6";
1684 1684 buildInputs = with self; [];
1685 1685 doCheck = false;
1686 1686 propagatedBuildInputs = with self; [];
1687 1687 src = fetchurl {
1688 1688 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1689 1689 md5 = "74228180f7e1fb76c4d7089160b0d919";
1690 1690 };
1691 1691 meta = {
1692 1692 license = [ { fullName = "MIT/X11"; } ];
1693 1693 };
1694 1694 };
1695 1695 repoze.lru = super.buildPythonPackage {
1696 1696 name = "repoze.lru-0.6";
1697 1697 buildInputs = with self; [];
1698 1698 doCheck = false;
1699 1699 propagatedBuildInputs = with self; [];
1700 1700 src = fetchurl {
1701 1701 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1702 1702 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1703 1703 };
1704 1704 meta = {
1705 1705 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1706 1706 };
1707 1707 };
1708 1708 requests = super.buildPythonPackage {
1709 1709 name = "requests-2.9.1";
1710 1710 buildInputs = with self; [];
1711 1711 doCheck = false;
1712 1712 propagatedBuildInputs = with self; [];
1713 1713 src = fetchurl {
1714 1714 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1715 1715 md5 = "0b7f480d19012ec52bab78292efd976d";
1716 1716 };
1717 1717 meta = {
1718 1718 license = [ pkgs.lib.licenses.asl20 ];
1719 1719 };
1720 1720 };
1721 1721 rhodecode-enterprise-ce = super.buildPythonPackage {
1722 1722 name = "rhodecode-enterprise-ce-4.9.0";
1723 1723 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
1724 1724 doCheck = true;
1725 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments pygments-markdown-lexer Pylons Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress cssselect celery channelstream colander decorator deform docutils gevent gunicorn infrae.cache ipython iso8601 kombu lxml msgpack-python nbconvert packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson subprocess32 waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1725 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments pygments-markdown-lexer Pylons Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic cssselect celery channelstream colander decorator deform docutils gevent gunicorn infrae.cache ipython iso8601 kombu lxml msgpack-python nbconvert packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson subprocess32 waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1726 1726 src = ./.;
1727 1727 meta = {
1728 1728 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
1729 1729 };
1730 1730 };
1731 1731 rhodecode-tools = super.buildPythonPackage {
1732 1732 name = "rhodecode-tools-0.12.0";
1733 1733 buildInputs = with self; [];
1734 1734 doCheck = false;
1735 1735 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests elasticsearch elasticsearch-dsl urllib3 Whoosh];
1736 1736 src = fetchurl {
1737 1737 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.12.0.tar.gz?md5=9ca040356fa7e38d3f64529a4cffdca4";
1738 1738 md5 = "9ca040356fa7e38d3f64529a4cffdca4";
1739 1739 };
1740 1740 meta = {
1741 1741 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1742 1742 };
1743 1743 };
1744 1744 scandir = super.buildPythonPackage {
1745 1745 name = "scandir-1.5";
1746 1746 buildInputs = with self; [];
1747 1747 doCheck = false;
1748 1748 propagatedBuildInputs = with self; [];
1749 1749 src = fetchurl {
1750 1750 url = "https://pypi.python.org/packages/bd/f4/3143e0289faf0883228017dbc6387a66d0b468df646645e29e1eb89ea10e/scandir-1.5.tar.gz";
1751 1751 md5 = "a2713043de681bba6b084be42e7a8a44";
1752 1752 };
1753 1753 meta = {
1754 1754 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
1755 1755 };
1756 1756 };
1757 1757 setproctitle = super.buildPythonPackage {
1758 1758 name = "setproctitle-1.1.8";
1759 1759 buildInputs = with self; [];
1760 1760 doCheck = false;
1761 1761 propagatedBuildInputs = with self; [];
1762 1762 src = fetchurl {
1763 1763 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1764 1764 md5 = "728f4c8c6031bbe56083a48594027edd";
1765 1765 };
1766 1766 meta = {
1767 1767 license = [ pkgs.lib.licenses.bsdOriginal ];
1768 1768 };
1769 1769 };
1770 1770 setuptools = super.buildPythonPackage {
1771 1771 name = "setuptools-30.1.0";
1772 1772 buildInputs = with self; [];
1773 1773 doCheck = false;
1774 1774 propagatedBuildInputs = with self; [];
1775 1775 src = fetchurl {
1776 1776 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
1777 1777 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
1778 1778 };
1779 1779 meta = {
1780 1780 license = [ pkgs.lib.licenses.mit ];
1781 1781 };
1782 1782 };
1783 1783 setuptools-scm = super.buildPythonPackage {
1784 1784 name = "setuptools-scm-1.15.0";
1785 1785 buildInputs = with self; [];
1786 1786 doCheck = false;
1787 1787 propagatedBuildInputs = with self; [];
1788 1788 src = fetchurl {
1789 1789 url = "https://pypi.python.org/packages/80/b7/31b6ae5fcb188e37f7e31abe75f9be90490a5456a72860fa6e643f8a3cbc/setuptools_scm-1.15.0.tar.gz";
1790 1790 md5 = "b6916c78ed6253d6602444fad4279c5b";
1791 1791 };
1792 1792 meta = {
1793 1793 license = [ pkgs.lib.licenses.mit ];
1794 1794 };
1795 1795 };
1796 1796 simplegeneric = super.buildPythonPackage {
1797 1797 name = "simplegeneric-0.8.1";
1798 1798 buildInputs = with self; [];
1799 1799 doCheck = false;
1800 1800 propagatedBuildInputs = with self; [];
1801 1801 src = fetchurl {
1802 1802 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
1803 1803 md5 = "f9c1fab00fd981be588fc32759f474e3";
1804 1804 };
1805 1805 meta = {
1806 1806 license = [ pkgs.lib.licenses.zpt21 ];
1807 1807 };
1808 1808 };
1809 1809 simplejson = super.buildPythonPackage {
1810 1810 name = "simplejson-3.11.1";
1811 1811 buildInputs = with self; [];
1812 1812 doCheck = false;
1813 1813 propagatedBuildInputs = with self; [];
1814 1814 src = fetchurl {
1815 1815 url = "https://pypi.python.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
1816 1816 md5 = "6e2f1bd5fb0a926facf5d89d217a7183";
1817 1817 };
1818 1818 meta = {
1819 1819 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
1820 1820 };
1821 1821 };
1822 1822 six = super.buildPythonPackage {
1823 1823 name = "six-1.9.0";
1824 1824 buildInputs = with self; [];
1825 1825 doCheck = false;
1826 1826 propagatedBuildInputs = with self; [];
1827 1827 src = fetchurl {
1828 1828 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1829 1829 md5 = "476881ef4012262dfc8adc645ee786c4";
1830 1830 };
1831 1831 meta = {
1832 1832 license = [ pkgs.lib.licenses.mit ];
1833 1833 };
1834 1834 };
1835 1835 subprocess32 = super.buildPythonPackage {
1836 1836 name = "subprocess32-3.2.7";
1837 1837 buildInputs = with self; [];
1838 1838 doCheck = false;
1839 1839 propagatedBuildInputs = with self; [];
1840 1840 src = fetchurl {
1841 1841 url = "https://pypi.python.org/packages/b8/2f/49e53b0d0e94611a2dc624a1ad24d41b6d94d0f1b0a078443407ea2214c2/subprocess32-3.2.7.tar.gz";
1842 1842 md5 = "824c801e479d3e916879aae3e9c15e16";
1843 1843 };
1844 1844 meta = {
1845 1845 license = [ pkgs.lib.licenses.psfl ];
1846 1846 };
1847 1847 };
1848 1848 supervisor = super.buildPythonPackage {
1849 1849 name = "supervisor-3.3.2";
1850 1850 buildInputs = with self; [];
1851 1851 doCheck = false;
1852 1852 propagatedBuildInputs = with self; [meld3];
1853 1853 src = fetchurl {
1854 1854 url = "https://pypi.python.org/packages/7b/17/88adf8cb25f80e2bc0d18e094fcd7ab300632ea00b601cbbbb84c2419eae/supervisor-3.3.2.tar.gz";
1855 1855 md5 = "04766d62864da13d6a12f7429e75314f";
1856 1856 };
1857 1857 meta = {
1858 1858 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1859 1859 };
1860 1860 };
1861 1861 termcolor = super.buildPythonPackage {
1862 1862 name = "termcolor-1.1.0";
1863 1863 buildInputs = with self; [];
1864 1864 doCheck = false;
1865 1865 propagatedBuildInputs = with self; [];
1866 1866 src = fetchurl {
1867 1867 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
1868 1868 md5 = "043e89644f8909d462fbbfa511c768df";
1869 1869 };
1870 1870 meta = {
1871 1871 license = [ pkgs.lib.licenses.mit ];
1872 1872 };
1873 1873 };
1874 1874 testpath = super.buildPythonPackage {
1875 1875 name = "testpath-0.3.1";
1876 1876 buildInputs = with self; [];
1877 1877 doCheck = false;
1878 1878 propagatedBuildInputs = with self; [];
1879 1879 src = fetchurl {
1880 1880 url = "https://pypi.python.org/packages/f4/8b/b71e9ee10e5f751e9d959bc750ab122ba04187f5aa52aabdc4e63b0e31a7/testpath-0.3.1.tar.gz";
1881 1881 md5 = "2cd5ed5522fda781bb497c9d80ae2fc9";
1882 1882 };
1883 1883 meta = {
1884 1884 license = [ pkgs.lib.licenses.mit ];
1885 1885 };
1886 1886 };
1887 1887 traitlets = super.buildPythonPackage {
1888 1888 name = "traitlets-4.3.2";
1889 1889 buildInputs = with self; [];
1890 1890 doCheck = false;
1891 1891 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
1892 1892 src = fetchurl {
1893 1893 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
1894 1894 md5 = "3068663f2f38fd939a9eb3a500ccc154";
1895 1895 };
1896 1896 meta = {
1897 1897 license = [ pkgs.lib.licenses.bsdOriginal ];
1898 1898 };
1899 1899 };
1900 1900 transifex-client = super.buildPythonPackage {
1901 1901 name = "transifex-client-0.10";
1902 1902 buildInputs = with self; [];
1903 1903 doCheck = false;
1904 1904 propagatedBuildInputs = with self; [];
1905 1905 src = fetchurl {
1906 1906 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1907 1907 md5 = "5549538d84b8eede6b254cd81ae024fa";
1908 1908 };
1909 1909 meta = {
1910 1910 license = [ pkgs.lib.licenses.gpl2 ];
1911 1911 };
1912 1912 };
1913 1913 translationstring = super.buildPythonPackage {
1914 1914 name = "translationstring-1.3";
1915 1915 buildInputs = with self; [];
1916 1916 doCheck = false;
1917 1917 propagatedBuildInputs = with self; [];
1918 1918 src = fetchurl {
1919 1919 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1920 1920 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1921 1921 };
1922 1922 meta = {
1923 1923 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1924 1924 };
1925 1925 };
1926 1926 trollius = super.buildPythonPackage {
1927 1927 name = "trollius-1.0.4";
1928 1928 buildInputs = with self; [];
1929 1929 doCheck = false;
1930 1930 propagatedBuildInputs = with self; [futures];
1931 1931 src = fetchurl {
1932 1932 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1933 1933 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1934 1934 };
1935 1935 meta = {
1936 1936 license = [ pkgs.lib.licenses.asl20 ];
1937 1937 };
1938 1938 };
1939 1939 uWSGI = super.buildPythonPackage {
1940 1940 name = "uWSGI-2.0.15";
1941 1941 buildInputs = with self; [];
1942 1942 doCheck = false;
1943 1943 propagatedBuildInputs = with self; [];
1944 1944 src = fetchurl {
1945 1945 url = "https://pypi.python.org/packages/bb/0a/45e5aa80dc135889594bb371c082d20fb7ee7303b174874c996888cc8511/uwsgi-2.0.15.tar.gz";
1946 1946 md5 = "fc50bd9e83b7602fa474b032167010a7";
1947 1947 };
1948 1948 meta = {
1949 1949 license = [ pkgs.lib.licenses.gpl2 ];
1950 1950 };
1951 1951 };
1952 1952 urllib3 = super.buildPythonPackage {
1953 1953 name = "urllib3-1.16";
1954 1954 buildInputs = with self; [];
1955 1955 doCheck = false;
1956 1956 propagatedBuildInputs = with self; [];
1957 1957 src = fetchurl {
1958 1958 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1959 1959 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1960 1960 };
1961 1961 meta = {
1962 1962 license = [ pkgs.lib.licenses.mit ];
1963 1963 };
1964 1964 };
1965 1965 venusian = super.buildPythonPackage {
1966 1966 name = "venusian-1.1.0";
1967 1967 buildInputs = with self; [];
1968 1968 doCheck = false;
1969 1969 propagatedBuildInputs = with self; [];
1970 1970 src = fetchurl {
1971 1971 url = "https://pypi.python.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
1972 1972 md5 = "56bc5e6756e4bda37bcdb94f74a72b8f";
1973 1973 };
1974 1974 meta = {
1975 1975 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1976 1976 };
1977 1977 };
1978 1978 waitress = super.buildPythonPackage {
1979 1979 name = "waitress-1.0.2";
1980 1980 buildInputs = with self; [];
1981 1981 doCheck = false;
1982 1982 propagatedBuildInputs = with self; [];
1983 1983 src = fetchurl {
1984 1984 url = "https://pypi.python.org/packages/cd/f4/400d00863afa1e03618e31fd7e2092479a71b8c9718b00eb1eeb603746c6/waitress-1.0.2.tar.gz";
1985 1985 md5 = "b968f39e95d609f6194c6e50425d4bb7";
1986 1986 };
1987 1987 meta = {
1988 1988 license = [ pkgs.lib.licenses.zpt21 ];
1989 1989 };
1990 1990 };
1991 1991 wcwidth = super.buildPythonPackage {
1992 1992 name = "wcwidth-0.1.7";
1993 1993 buildInputs = with self; [];
1994 1994 doCheck = false;
1995 1995 propagatedBuildInputs = with self; [];
1996 1996 src = fetchurl {
1997 1997 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
1998 1998 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
1999 1999 };
2000 2000 meta = {
2001 2001 license = [ pkgs.lib.licenses.mit ];
2002 2002 };
2003 2003 };
2004 2004 ws4py = super.buildPythonPackage {
2005 2005 name = "ws4py-0.3.5";
2006 2006 buildInputs = with self; [];
2007 2007 doCheck = false;
2008 2008 propagatedBuildInputs = with self; [];
2009 2009 src = fetchurl {
2010 2010 url = "https://pypi.python.org/packages/b6/4f/34af703be86939629479e74d6e650e39f3bd73b3b09212c34e5125764cbc/ws4py-0.3.5.zip";
2011 2011 md5 = "a261b75c20b980e55ce7451a3576a867";
2012 2012 };
2013 2013 meta = {
2014 2014 license = [ pkgs.lib.licenses.bsdOriginal ];
2015 2015 };
2016 2016 };
2017 2017 wsgiref = super.buildPythonPackage {
2018 2018 name = "wsgiref-0.1.2";
2019 2019 buildInputs = with self; [];
2020 2020 doCheck = false;
2021 2021 propagatedBuildInputs = with self; [];
2022 2022 src = fetchurl {
2023 2023 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2024 2024 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
2025 2025 };
2026 2026 meta = {
2027 2027 license = [ { fullName = "PSF or ZPL"; } ];
2028 2028 };
2029 2029 };
2030 2030 zope.cachedescriptors = super.buildPythonPackage {
2031 2031 name = "zope.cachedescriptors-4.0.0";
2032 2032 buildInputs = with self; [];
2033 2033 doCheck = false;
2034 2034 propagatedBuildInputs = with self; [setuptools];
2035 2035 src = fetchurl {
2036 2036 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
2037 2037 md5 = "8d308de8c936792c8e758058fcb7d0f0";
2038 2038 };
2039 2039 meta = {
2040 2040 license = [ pkgs.lib.licenses.zpt21 ];
2041 2041 };
2042 2042 };
2043 2043 zope.deprecation = super.buildPythonPackage {
2044 2044 name = "zope.deprecation-4.1.2";
2045 2045 buildInputs = with self; [];
2046 2046 doCheck = false;
2047 2047 propagatedBuildInputs = with self; [setuptools];
2048 2048 src = fetchurl {
2049 2049 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
2050 2050 md5 = "e9a663ded58f4f9f7881beb56cae2782";
2051 2051 };
2052 2052 meta = {
2053 2053 license = [ pkgs.lib.licenses.zpt21 ];
2054 2054 };
2055 2055 };
2056 2056 zope.event = super.buildPythonPackage {
2057 2057 name = "zope.event-4.0.3";
2058 2058 buildInputs = with self; [];
2059 2059 doCheck = false;
2060 2060 propagatedBuildInputs = with self; [setuptools];
2061 2061 src = fetchurl {
2062 2062 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
2063 2063 md5 = "9a3780916332b18b8b85f522bcc3e249";
2064 2064 };
2065 2065 meta = {
2066 2066 license = [ pkgs.lib.licenses.zpt21 ];
2067 2067 };
2068 2068 };
2069 2069 zope.interface = super.buildPythonPackage {
2070 2070 name = "zope.interface-4.1.3";
2071 2071 buildInputs = with self; [];
2072 2072 doCheck = false;
2073 2073 propagatedBuildInputs = with self; [setuptools];
2074 2074 src = fetchurl {
2075 2075 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
2076 2076 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
2077 2077 };
2078 2078 meta = {
2079 2079 license = [ pkgs.lib.licenses.zpt21 ];
2080 2080 };
2081 2081 };
2082 2082
2083 2083 ### Test requirements
2084 2084
2085 2085
2086 2086 }
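The only functional change in the expression above is the pyramid-debugtoolbar pin: the package moves from 3.0.5 to 4.2.1 and gains ipaddress in its propagated build inputs. A minimal sketch, assuming the rebuilt Python environment is active; the helper name check_pin is illustrative only and not part of this repository.

import pkg_resources

def check_pin(name, expected):
    # report the version of the distribution installed in the current
    # environment and fail loudly if it drifts from the pin above
    installed = pkg_resources.get_distribution(name).version
    assert installed == expected, (
        '%s is %s, expected %s' % (name, installed, expected))

check_pin('pyramid-debugtoolbar', '4.2.1')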
@@ -1,136 +1,135 b''
1 1 ## core
2 2 setuptools==30.1.0
3 3 setuptools-scm==1.15.0
4 4
5 5 amqplib==1.0.2
6 6 anyjson==0.3.3
7 7 authomatic==0.1.0.post1
8 8 Babel==1.3
9 backport-ipaddress==0.1
10 9 Beaker==1.9.0
11 10 celery==2.2.10
12 11 Chameleon==2.24
13 12 channelstream==0.5.2
14 13 click==5.1
15 14 colander==1.3.3
16 15 configobj==5.0.6
17 16 cssselect==1.0.1
18 17 decorator==4.0.11
19 18 deform==2.0.4
20 19 docutils==0.13.1
21 20 dogpile.cache==0.6.4
22 21 dogpile.core==0.4.1
23 22 ecdsa==0.11
24 23 FormEncode==1.2.4
25 24 future==0.14.3
26 25 futures==3.0.2
27 26 gnureadline==6.3.3
28 27 infrae.cache==1.0.1
29 28 iso8601==0.1.11
30 29 itsdangerous==0.24
31 30 Jinja2==2.7.3
32 31 kombu==1.5.1
33 32 lxml==3.7.3
34 33 Mako==1.0.6
35 34 Markdown==2.6.8
36 35 MarkupSafe==0.23
37 36 meld3==1.0.2
38 37 msgpack-python==0.4.8
39 38 MySQL-python==1.2.5
40 39 nose==1.3.6
41 40 objgraph==3.1.0
42 41 packaging==15.2
43 42 paramiko==1.15.1
44 43 Paste==2.0.3
45 44 PasteDeploy==1.5.2
46 45 PasteScript==1.7.5
47 46 pathlib2==2.3.0
48 47 psutil==4.3.1
49 48 psycopg2==2.7.1
50 49 py-bcrypt==0.4
51 50 pycrypto==2.6.1
52 51 pycurl==7.19.5
53 52 pyflakes==0.8.1
54 53 pygments-markdown-lexer==0.1.0.dev39
55 54 Pygments==2.2.0
56 55 pyparsing==1.5.7
57 56 pyramid-beaker==0.8
58 pyramid-debugtoolbar==3.0.5
57 pyramid-debugtoolbar==4.2.1
59 58 pyramid-jinja2==2.5
60 59 pyramid-mako==1.0.2
61 60 pyramid==1.9.0
62 61 pysqlite==2.8.3
63 62 python-dateutil==2.1
64 63 python-ldap==2.4.40
65 64 python-memcached==1.58
66 65 python-pam==1.8.2
67 66 pytz==2015.4
68 67 pyzmq==14.6.0
69 68 recaptcha-client==1.0.6
70 69 repoze.lru==0.6
71 70 requests==2.9.1
72 71 Routes==1.13
73 72 setproctitle==1.1.8
74 73 simplejson==3.11.1
75 74 six==1.9.0
76 75 Sphinx==1.2.2
77 76 SQLAlchemy==0.9.9
78 77 subprocess32==3.2.7
79 78 supervisor==3.3.2
80 79 Tempita==0.5.2
81 80 translationstring==1.3
82 81 trollius==1.0.4
83 82 urllib3==1.16
84 83 URLObject==2.4.0
85 84 venusian==1.1.0
86 85 WebError==0.10.3
87 86 WebHelpers2==2.0
88 87 WebHelpers==1.3
89 88 WebOb==1.7.3
90 89 Whoosh==2.7.4
91 90 wsgiref==0.1.2
92 91 zope.cachedescriptors==4.0.0
93 92 zope.deprecation==4.1.2
94 93 zope.event==4.0.3
95 94 zope.interface==4.1.3
96 95
97 96 ## customized/patched libs
98 97 # our patched version of Pylons==1.0.2
99 98 https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f#egg=Pylons==1.0.2.rhodecode-patch-1
100 99 # not released py-gfm==0.1.3
101 100 https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16#egg=py-gfm==0.1.3.rhodecode-upstream1
102 101
103 102 # IPYTHON RENDERING
104 103 # entrypoints backport, pypi version doesn't support egg installs
105 104 https://code.rhodecode.com/upstream/entrypoints/archive/96e6d645684e1af3d7df5b5272f3fe85a546b233.tar.gz?md5=7db37771aea9ac9fefe093e5d6987313#egg=entrypoints==0.2.2.rhodecode-upstream1
106 105 nbconvert==5.1.1
107 106 nbformat==4.3.0
108 107 jupyter_client==5.0.0
109 108
110 109 ## cli tools
111 110 alembic==0.9.2
112 111 invoke==0.13.0
113 112 bumpversion==0.5.3
114 113 transifex-client==0.10
115 114
116 115 ## http servers
117 116 gevent==1.2.2
118 117 greenlet==0.4.12
119 118 gunicorn==19.7.1
120 119 waitress==1.0.2
121 120 uWSGI==2.0.15
122 121
123 122 ## debug
124 123 ipdb==0.10.3
125 124 ipython==5.1.0
126 125 CProfileV==1.0.7
127 126 bottle==0.12.8
128 127
129 128 ## rhodecode-tools, special case
130 129 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.12.0.tar.gz?md5=9ca040356fa7e38d3f64529a4cffdca4#egg=rhodecode-tools==0.12.0
131 130
132 131 ## appenlight
133 132 appenlight-client==0.6.21
134 133
135 134 ## test related requirements
136 135 -r requirements_test.txt
@@ -1,388 +1,385 b''
1 1 {
2 2 "libnghttp2-1.7.1": {
3 3 "MIT License": "http://spdx.org/licenses/MIT"
4 4 },
5 5 "nodejs-4.3.1": {
6 6 "MIT License": "http://spdx.org/licenses/MIT"
7 7 },
8 8 "python-2.7.12": {
9 9 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
10 10 },
11 11 "python2.7-Babel-1.3": {
12 12 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
13 13 },
14 14 "python2.7-Beaker-1.7.0": {
15 15 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
16 16 },
17 17 "python2.7-Chameleon-2.24": {
18 18 "BSD-like": "http://repoze.org/license.html"
19 19 },
20 20 "python2.7-FormEncode-1.2.4": {
21 21 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
22 22 },
23 23 "python2.7-Jinja2-2.7.3": {
24 24 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
25 25 },
26 26 "python2.7-Mako-1.0.6": {
27 27 "MIT License": "http://spdx.org/licenses/MIT"
28 28 },
29 29 "python2.7-Markdown-2.6.7": {
30 30 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
31 31 },
32 32 "python2.7-MarkupSafe-0.23": {
33 33 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
34 34 },
35 35 "python2.7-Paste-2.0.3": {
36 36 "MIT License": "http://spdx.org/licenses/MIT"
37 37 },
38 38 "python2.7-PasteDeploy-1.5.2": {
39 39 "MIT License": "http://spdx.org/licenses/MIT"
40 40 },
41 41 "python2.7-PasteScript-1.7.5": {
42 42 "MIT License": "http://spdx.org/licenses/MIT"
43 43 },
44 44 "python2.7-Pygments-2.2.0": {
45 45 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
46 46 },
47 47 "python2.7-Pylons-1.0.2.rhodecode-patch1": {
48 48 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
49 49 },
50 50 "python2.7-Routes-1.13": {
51 51 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
52 52 },
53 53 "python2.7-SQLAlchemy-0.9.9": {
54 54 "MIT License": "http://spdx.org/licenses/MIT"
55 55 },
56 56 "python2.7-Tempita-0.5.2": {
57 57 "MIT License": "http://spdx.org/licenses/MIT"
58 58 },
59 59 "python2.7-URLObject-2.4.0": {
60 60 "The Unlicense": "http://unlicense.org/"
61 61 },
62 62 "python2.7-WebError-0.10.3": {
63 63 "MIT License": "http://spdx.org/licenses/MIT"
64 64 },
65 65 "python2.7-WebHelpers-1.3": {
66 66 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
67 67 },
68 68 "python2.7-WebHelpers2-2.0": {
69 69 "MIT License": "http://spdx.org/licenses/MIT"
70 70 },
71 71 "python2.7-WebOb-1.3.1": {
72 72 "MIT License": "http://spdx.org/licenses/MIT"
73 73 },
74 74 "python2.7-Whoosh-2.7.4": {
75 75 "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause",
76 76 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
77 77 },
78 78 "python2.7-alembic-0.8.4": {
79 79 "MIT License": "http://spdx.org/licenses/MIT"
80 80 },
81 81 "python2.7-amqplib-1.0.2": {
82 82 "GNU Lesser General Public License v3.0 only": "http://spdx.org/licenses/LGPL-3.0"
83 83 },
84 84 "python2.7-anyjson-0.3.3": {
85 85 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
86 86 },
87 87 "python2.7-appenlight-client-0.6.14": {
88 88 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
89 89 },
90 90 "python2.7-authomatic-0.1.0.post1": {
91 91 "MIT License": "http://spdx.org/licenses/MIT"
92 },
93 "python2.7-backport-ipaddress-0.1": {
94 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
95 },
92 },
96 93 "python2.7-backports.shutil-get-terminal-size-1.0.0": {
97 94 "MIT License": "http://spdx.org/licenses/MIT"
98 95 },
99 96 "python2.7-bleach-1.5.0": {
100 97 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
101 98 },
102 99 "python2.7-celery-2.2.10": {
103 100 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
104 101 },
105 102 "python2.7-channelstream-0.5.2": {
106 103 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
107 104 },
108 105 "python2.7-click-5.1": {
109 106 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
110 107 },
111 108 "python2.7-colander-1.2": {
112 109 "Repoze License": "http://www.repoze.org/LICENSE.txt"
113 110 },
114 111 "python2.7-configobj-5.0.6": {
115 112 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
116 113 },
117 114 "python2.7-configparser-3.5.0": {
118 115 "MIT License": "http://spdx.org/licenses/MIT"
119 116 },
120 117 "python2.7-cssselect-1.0.1": {
121 118 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
122 119 },
123 120 "python2.7-decorator-4.0.11": {
124 121 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
125 122 },
126 123 "python2.7-deform-2.0a2": {
127 124 "BSD-derived": "http://www.repoze.org/LICENSE.txt"
128 125 },
129 126 "python2.7-docutils-0.12": {
130 127 "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause"
131 128 },
132 129 "python2.7-dogpile.cache-0.6.1": {
133 130 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
134 131 },
135 132 "python2.7-dogpile.core-0.4.1": {
136 133 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
137 134 },
138 135 "python2.7-elasticsearch-2.3.0": {
139 136 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
140 137 },
141 138 "python2.7-elasticsearch-dsl-2.2.0": {
142 139 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
143 140 },
144 141 "python2.7-entrypoints-0.2.2": {
145 142 "MIT License": "http://spdx.org/licenses/MIT"
146 143 },
147 144 "python2.7-enum34-1.1.6": {
148 145 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
149 146 },
150 147 "python2.7-functools32-3.2.3.post2": {
151 148 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
152 149 },
153 150 "python2.7-future-0.14.3": {
154 151 "MIT License": "http://spdx.org/licenses/MIT"
155 152 },
156 153 "python2.7-futures-3.0.2": {
157 154 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
158 155 },
159 156 "python2.7-gevent-1.1.2": {
160 157 "MIT License": "http://spdx.org/licenses/MIT"
161 158 },
162 159 "python2.7-gnureadline-6.3.3": {
163 160 "GNU General Public License v1.0 only": "http://spdx.org/licenses/GPL-1.0"
164 161 },
165 162 "python2.7-gprof2dot-2016.10.13": {
166 163 "GNU Lesser General Public License v3.0 or later": "http://spdx.org/licenses/LGPL-3.0+"
167 164 },
168 165 "python2.7-greenlet-0.4.10": {
169 166 "MIT License": "http://spdx.org/licenses/MIT"
170 167 },
171 168 "python2.7-gunicorn-19.6.0": {
172 169 "MIT License": "http://spdx.org/licenses/MIT"
173 170 },
174 171 "python2.7-html5lib-0.9999999": {
175 172 "MIT License": "http://spdx.org/licenses/MIT"
176 173 },
177 174 "python2.7-infrae.cache-1.0.1": {
178 175 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
179 176 },
180 177 "python2.7-ipython-5.1.0": {
181 178 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
182 179 },
183 180 "python2.7-ipython-genutils-0.2.0": {
184 181 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
185 182 },
186 183 "python2.7-iso8601-0.1.11": {
187 184 "MIT License": "http://spdx.org/licenses/MIT"
188 185 },
189 186 "python2.7-itsdangerous-0.24": {
190 187 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
191 188 },
192 189 "python2.7-jsonschema-2.6.0": {
193 190 "MIT License": "http://spdx.org/licenses/MIT"
194 191 },
195 192 "python2.7-jupyter-client-5.0.0": {
196 193 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
197 194 },
198 195 "python2.7-jupyter-core-4.3.0": {
199 196 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
200 197 },
201 198 "python2.7-kombu-1.5.1": {
202 199 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
203 200 },
204 201 "python2.7-mistune-0.7.4": {
205 202 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
206 203 },
207 204 "python2.7-msgpack-python-0.4.8": {
208 205 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
209 206 },
210 207 "python2.7-nbconvert-5.1.1": {
211 208 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
212 209 },
213 210 "python2.7-nbformat-4.3.0": {
214 211 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
215 212 },
216 213 "python2.7-packaging-15.2": {
217 214 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
218 215 },
219 216 "python2.7-pandocfilters-1.4.1": {
220 217 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
221 218 },
222 219 "python2.7-pathlib2-2.1.0": {
223 220 "MIT License": "http://spdx.org/licenses/MIT"
224 221 },
225 222 "python2.7-peppercorn-0.5": {
226 223 "BSD-derived": "http://www.repoze.org/LICENSE.txt"
227 224 },
228 225 "python2.7-pexpect-4.2.1": {
229 226 "ISC License": "http://spdx.org/licenses/ISC"
230 227 },
231 228 "python2.7-pickleshare-0.7.4": {
232 229 "MIT License": "http://spdx.org/licenses/MIT"
233 230 },
234 231 "python2.7-prompt-toolkit-1.0.14": {
235 232 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
236 233 },
237 234 "python2.7-psutil-4.3.1": {
238 235 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
239 236 },
240 237 "python2.7-psycopg2-2.6.1": {
241 238 "GNU Lesser General Public License v3.0 or later": "http://spdx.org/licenses/LGPL-3.0+"
242 239 },
243 240 "python2.7-ptyprocess-0.5.1": {
244 241 "ISC License": "http://opensource.org/licenses/ISC"
245 242 },
246 243 "python2.7-py-1.4.31": {
247 244 "MIT License": "http://spdx.org/licenses/MIT"
248 245 },
249 246 "python2.7-py-bcrypt-0.4": {
250 247 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
251 248 },
252 249 "python2.7-py-gfm-0.1.3.rhodecode-upstream1": {
253 250 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
254 251 },
255 252 "python2.7-pycrypto-2.6.1": {
256 253 "Public Domain": null
257 254 },
258 255 "python2.7-pycurl-7.19.5": {
259 256 "MIT License": "http://spdx.org/licenses/MIT"
260 257 },
261 258 "python2.7-pygments-markdown-lexer-0.1.0.dev39": {
262 259 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
263 260 },
264 261 "python2.7-pyparsing-1.5.7": {
265 262 "MIT License": "http://spdx.org/licenses/MIT"
266 263 },
267 264 "python2.7-pyramid-1.7.4": {
268 265 "Repoze License": "http://www.repoze.org/LICENSE.txt"
269 266 },
270 267 "python2.7-pyramid-beaker-0.8": {
271 268 "Repoze License": "http://www.repoze.org/LICENSE.txt"
272 269 },
273 270 "python2.7-pyramid-debugtoolbar-3.0.5": {
274 271 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause",
275 272 "Repoze License": "http://www.repoze.org/LICENSE.txt"
276 273 },
277 274 "python2.7-pyramid-jinja2-2.5": {
278 275 "BSD-derived": "http://www.repoze.org/LICENSE.txt"
279 276 },
280 277 "python2.7-pyramid-mako-1.0.2": {
281 278 "Repoze License": "http://www.repoze.org/LICENSE.txt"
282 279 },
283 280 "python2.7-pysqlite-2.6.3": {
284 281 "libpng License": "http://spdx.org/licenses/Libpng",
285 282 "zlib License": "http://spdx.org/licenses/Zlib"
286 283 },
287 284 "python2.7-pytest-3.0.5": {
288 285 "MIT License": "http://spdx.org/licenses/MIT"
289 286 },
290 287 "python2.7-pytest-profiling-1.2.2": {
291 288 "MIT License": "http://spdx.org/licenses/MIT"
292 289 },
293 290 "python2.7-pytest-runner-2.9": {
294 291 "MIT License": "http://spdx.org/licenses/MIT"
295 292 },
296 293 "python2.7-pytest-sugar-0.7.1": {
297 294 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
298 295 },
299 296 "python2.7-pytest-timeout-1.2.0": {
300 297 "MIT License": "http://spdx.org/licenses/MIT"
301 298 },
302 299 "python2.7-python-dateutil-2.1": {
303 300 "Simplified BSD": null
304 301 },
305 302 "python2.7-python-editor-1.0.3": {
306 303 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
307 304 },
308 305 "python2.7-python-ldap-2.4.19": {
309 306 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
310 307 },
311 308 "python2.7-python-memcached-1.57": {
312 309 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
313 310 },
314 311 "python2.7-pytz-2015.4": {
315 312 "MIT License": "http://spdx.org/licenses/MIT"
316 313 },
317 314 "python2.7-pyzmq-14.6.0": {
318 315 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
319 316 },
320 317 "python2.7-recaptcha-client-1.0.6": {
321 318 "MIT License": "http://spdx.org/licenses/MIT"
322 319 },
323 320 "python2.7-repoze.lru-0.6": {
324 321 "Repoze License": "http://www.repoze.org/LICENSE.txt"
325 322 },
326 323 "python2.7-requests-2.9.1": {
327 324 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
328 325 },
329 326 "python2.7-setuptools-19.4": {
330 327 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0",
331 328 "Zope Public License 2.0": "http://spdx.org/licenses/ZPL-2.0"
332 329 },
333 330 "python2.7-setuptools-scm-1.15.0": {
334 331 "MIT License": "http://spdx.org/licenses/MIT"
335 332 },
336 333 "python2.7-simplegeneric-0.8.1": {
337 334 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
338 335 },
339 336 "python2.7-simplejson-3.7.2": {
340 337 "MIT License": "http://spdx.org/licenses/MIT"
341 338 },
342 339 "python2.7-six-1.9.0": {
343 340 "MIT License": "http://spdx.org/licenses/MIT"
344 341 },
345 342 "python2.7-subprocess32-3.2.6": {
346 343 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
347 344 },
348 345 "python2.7-termcolor-1.1.0": {
349 346 "MIT License": "http://spdx.org/licenses/MIT"
350 347 },
351 348 "python2.7-testpath-0.1": {
352 349 "MIT License": "http://spdx.org/licenses/MIT"
353 350 },
354 351 "python2.7-traitlets-4.3.2": {
355 352 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
356 353 },
357 354 "python2.7-translationstring-1.3": {
358 355 "Repoze License": "http://www.repoze.org/LICENSE.txt"
359 356 },
360 357 "python2.7-urllib3-1.16": {
361 358 "MIT License": "http://spdx.org/licenses/MIT"
362 359 },
363 360 "python2.7-venusian-1.0": {
364 361 "Repoze License": "http://www.repoze.org/LICENSE.txt"
365 362 },
366 363 "python2.7-waitress-1.0.1": {
367 364 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
368 365 },
369 366 "python2.7-wcwidth-0.1.7": {
370 367 "MIT License": "http://spdx.org/licenses/MIT"
371 368 },
372 369 "python2.7-ws4py-0.3.5": {
373 370 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
374 371 },
375 372 "python2.7-zope.cachedescriptors-4.0.0": {
376 373 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
377 374 },
378 375 "python2.7-zope.deprecation-4.1.2": {
379 376 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
380 377 },
381 378 "python2.7-zope.interface-4.1.3": {
382 379 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
383 380 },
384 381 "xz-5.2.2": {
385 382 "GNU General Public License v2.0 or later": "http://spdx.org/licenses/GPL-2.0+",
386 383 "GNU Library General Public License v2.1 or later": "http://spdx.org/licenses/LGPL-2.1+"
387 384 }
388 385 } No newline at end of file
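The JSON manifest above maps each bundled component to its license names and, where available, SPDX URLs. A minimal sketch of how such a manifest can be queried; the licenses.json file name is an assumption, not taken from this changeset.

import json

# load the license manifest shown above (file name is an assumption)
with open('licenses.json') as manifest_file:
    manifest = json.load(manifest_file)

# group components by license name, e.g. to review everything MIT-licensed
by_license = {}
for component, licenses in manifest.items():
    for license_name in licenses:
        by_license.setdefault(license_name, []).append(component)

print(sorted(by_license.get('MIT License', [])))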
@@ -1,2026 +1,2027 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 authentication and permission libraries
23 23 """
24 24
25 25 import os
26 26 import inspect
27 27 import collections
28 28 import fnmatch
29 29 import hashlib
30 30 import itertools
31 31 import logging
32 32 import random
33 33 import traceback
34 34 from functools import wraps
35 35
36 36 import ipaddress
37 37 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
38 38 from pylons.i18n.translation import _
39 39 # NOTE(marcink): this has to be removed only after pyramid migration,
40 40 # replace with _ = request.translate
41 41 from sqlalchemy.orm.exc import ObjectDeletedError
42 42 from sqlalchemy.orm import joinedload
43 43 from zope.cachedescriptors.property import Lazy as LazyProperty
44 44
45 45 import rhodecode
46 46 from rhodecode.model import meta
47 47 from rhodecode.model.meta import Session
48 48 from rhodecode.model.user import UserModel
49 49 from rhodecode.model.db import (
50 50 User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
51 51 UserIpMap, UserApiKeys, RepoGroup)
52 52 from rhodecode.lib import caches
53 53 from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5
54 54 from rhodecode.lib.utils import (
55 55 get_repo_slug, get_repo_group_slug, get_user_group_slug)
56 56 from rhodecode.lib.caching_query import FromCache
57 57
58 58
59 59 if rhodecode.is_unix:
60 60 import bcrypt
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64 csrf_token_key = "csrf_token"
65 65
66 66
67 67 class PasswordGenerator(object):
68 68 """
69 69 This is a simple class for generating passwords from different sets of
70 70 characters.
71 71 usage::
72 72
73 73 passwd_gen = PasswordGenerator()
74 74 # print an 8-letter password containing only big and small letters
75 75 # of the alphabet
76 76 passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
77 77 """
78 78 ALPHABETS_NUM = r'''1234567890'''
79 79 ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
80 80 ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
81 81 ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
82 82 ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
83 83 + ALPHABETS_NUM + ALPHABETS_SPECIAL
84 84 ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
85 85 ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
86 86 ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
87 87 ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
88 88
89 89 def __init__(self, passwd=''):
90 90 self.passwd = passwd
91 91
92 92 def gen_password(self, length, type_=None):
93 93 if type_ is None:
94 94 type_ = self.ALPHABETS_FULL
95 95 self.passwd = ''.join([random.choice(type_) for _ in xrange(length)])
96 96 return self.passwd
97 97
98 98
99 99 class _RhodeCodeCryptoBase(object):
100 100 ENC_PREF = None
101 101
102 102 def hash_create(self, str_):
103 103 """
104 104 hash the given string
105 105
106 106 :param str_: password to hash
107 107 """
108 108 raise NotImplementedError
109 109
110 110 def hash_check_with_upgrade(self, password, hashed):
111 111 """
112 112 Returns a tuple whose first element is a boolean stating whether the
113 113 given password matches its hashed version, and whose second element is
114 114 a new hash of the password, in case this password should be migrated
115 115 to a new cipher.
116 116 """
117 117 checked_hash = self.hash_check(password, hashed)
118 118 return checked_hash, None
119 119
120 120 def hash_check(self, password, hashed):
121 121 """
122 122 Checks whether the given password matches its hashed value.
123 123
124 124 :param password: password
125 125 :param hashed: password in hashed form
126 126 """
127 127 raise NotImplementedError
128 128
129 129 def _assert_bytes(self, value):
130 130 """
131 131 Passing in a `unicode` object can lead to hard-to-detect issues
132 132 if passwords contain non-ascii characters. We therefore do a type check
133 133 at runtime, so that such mistakes are detected early on.
134 134 """
135 135 if not isinstance(value, str):
136 136 raise TypeError(
137 137 "Bytestring required as input, got %r." % (value, ))
138 138
139 139
140 140 class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
141 141 ENC_PREF = ('$2a$10', '$2b$10')
142 142
143 143 def hash_create(self, str_):
144 144 self._assert_bytes(str_)
145 145 return bcrypt.hashpw(str_, bcrypt.gensalt(10))
146 146
147 147 def hash_check_with_upgrade(self, password, hashed):
148 148 """
149 149 Returns tuple in which first element is boolean that states that
150 150 given password matches it's hashed version, and the second is new hash
151 151 of the password, in case this password should be migrated to new
152 152 cipher.
153 153
154 154 This implements special upgrade logic which works as follows:
155 155 - check if the given password matches the bcrypt hash; if yes, the
156 156 correct password was used and it was already stored as bcrypt.
157 157 Proceed without any changes.
158 158 - if the bcrypt hash check fails, try sha256. If that hash compare
159 159 is ok, it means a correct but old hashed password was used; indicate
160 160 a hash change and proceed.
161 161 """
162 162
163 163 new_hash = None
164 164
165 165 # regular pw check
166 166 password_match_bcrypt = self.hash_check(password, hashed)
167 167
168 168 # now we want to know if the password was maybe from sha256
169 169 # basically calling _RhodeCodeCryptoSha256().hash_check()
170 170 if not password_match_bcrypt:
171 171 if _RhodeCodeCryptoSha256().hash_check(password, hashed):
172 172 new_hash = self.hash_create(password) # make new bcrypt hash
173 173 password_match_bcrypt = True
174 174
175 175 return password_match_bcrypt, new_hash
176 176
177 177 def hash_check(self, password, hashed):
178 178 """
179 179 Checks whether the password matches its hashed value.
180 180
181 181 :param password: password
182 182 :param hashed: password in hashed form
183 183 """
184 184 self._assert_bytes(password)
185 185 try:
186 186 return bcrypt.hashpw(password, hashed) == hashed
187 187 except ValueError as e:
188 188 # we probably have an invalid salt here; we should not crash,
189 189 # just return False as it would be a wrong password.
190 190 log.debug('Failed to check password hash using bcrypt %s',
191 191 safe_str(e))
192 192
193 193 return False
194 194
195 195
196 196 class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
197 197 ENC_PREF = '_'
198 198
199 199 def hash_create(self, str_):
200 200 self._assert_bytes(str_)
201 201 return hashlib.sha256(str_).hexdigest()
202 202
203 203 def hash_check(self, password, hashed):
204 204 """
205 205 Checks whether the password matches its hashed value.
206 206
207 207 :param password: password
208 208 :param hashed: password in hashed form
209 209 """
210 210 self._assert_bytes(password)
211 211 return hashlib.sha256(password).hexdigest() == hashed
212 212
213 213
214 214 class _RhodeCodeCryptoMd5(_RhodeCodeCryptoBase):
215 215 ENC_PREF = '_'
216 216
217 217 def hash_create(self, str_):
218 218 self._assert_bytes(str_)
219 219 return hashlib.md5(str_).hexdigest()
220 220
221 221 def hash_check(self, password, hashed):
222 222 """
223 223 Checks whether the password matches its hashed value.
224 224
225 225 :param password: password
226 226 :param hashed: password in hashed form
227 227 """
228 228 self._assert_bytes(password)
229 229 return hashlib.md5(password).hexdigest() == hashed
230 230
231 231
232 232 def crypto_backend():
233 233 """
234 234 Return the matching crypto backend.
235 235
236 236 Selection is based on whether we are running tests; we pick the md5
237 237 backend to run tests faster, since bcrypt is expensive to calculate.
238 238 """
239 239 if rhodecode.is_test:
240 240 RhodeCodeCrypto = _RhodeCodeCryptoMd5()
241 241 else:
242 242 RhodeCodeCrypto = _RhodeCodeCryptoBCrypt()
243 243
244 244 return RhodeCodeCrypto
245 245
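# Illustrative usage sketch (example values): a create/verify round trip with
# whichever backend crypto_backend() selects. Passwords must be bytestrings,
# as enforced by _assert_bytes.
#
#     backend = crypto_backend()
#     hashed = backend.hash_create('s3cr3t')
#     assert backend.hash_check('s3cr3t', hashed)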
246 246
247 247 def get_crypt_password(password):
248 248 """
249 249 Create the hash of `password` with the active crypto backend.
250 250
251 251 :param password: The cleartext password.
252 252 :type password: unicode
253 253 """
254 254 password = safe_str(password)
255 255 return crypto_backend().hash_create(password)
256 256
257 257
258 258 def check_password(password, hashed):
259 259 """
260 260 Check if the value in `password` matches the hash in `hashed`.
261 261
262 262 :param password: The cleartext password.
263 263 :type password: unicode
264 264
265 265 :param hashed: The expected hashed version of the password.
266 266 :type hashed: The hash has to be passed in text representation.
267 267 """
268 268 password = safe_str(password)
269 269 return crypto_backend().hash_check(password, hashed)
270 270
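# Illustrative usage sketch of the two module-level helpers above; the
# cleartext value here is just an example.
#
#     hashed = get_crypt_password(u'secret')
#     assert check_password(u'secret', hashed)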
271 271
272 272 def generate_auth_token(data, salt=None):
273 273 """
274 274 Generates an API key (auth token) from the given string.
275 275 """
276 276
277 277 if salt is None:
278 278 salt = os.urandom(16)
279 279 return hashlib.sha1(safe_str(data) + salt).hexdigest()
280 280
281 281
282 282 class CookieStoreWrapper(object):
283 283
284 284 def __init__(self, cookie_store):
285 285 self.cookie_store = cookie_store
286 286
287 287 def __repr__(self):
288 288 return 'CookieStore<%s>' % (self.cookie_store)
289 289
290 290 def get(self, key, other=None):
291 291 if isinstance(self.cookie_store, dict):
292 292 return self.cookie_store.get(key, other)
293 293 elif isinstance(self.cookie_store, AuthUser):
294 294 return self.cookie_store.__dict__.get(key, other)
295 295
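# Illustrative usage sketch (example values): CookieStoreWrapper gives the
# same .get() access whether the session cookie restored a plain dict or an
# AuthUser instance.
#
#     store = CookieStoreWrapper({'username': 'admin', 'user_id': 2})
#     store.get('username')             # -> 'admin'
#     store.get('missing', 'fallback')  # -> 'fallback'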
296 296
297 297 def _cached_perms_data(user_id, scope, user_is_admin,
298 298 user_inherit_default_permissions, explicit, algo):
299 299
300 300 permissions = PermissionCalculator(
301 301 user_id, scope, user_is_admin, user_inherit_default_permissions,
302 302 explicit, algo)
303 303 return permissions.calculate()
304 304
305 305
306 306 class PermOrigin(object):
307 307 ADMIN = 'superadmin'
308 308
309 309 REPO_USER = 'user:%s'
310 310 REPO_USERGROUP = 'usergroup:%s'
311 311 REPO_OWNER = 'repo.owner'
312 312 REPO_DEFAULT = 'repo.default'
313 313 REPO_PRIVATE = 'repo.private'
314 314
315 315 REPOGROUP_USER = 'user:%s'
316 316 REPOGROUP_USERGROUP = 'usergroup:%s'
317 317 REPOGROUP_OWNER = 'group.owner'
318 318 REPOGROUP_DEFAULT = 'group.default'
319 319
320 320 USERGROUP_USER = 'user:%s'
321 321 USERGROUP_USERGROUP = 'usergroup:%s'
322 322 USERGROUP_OWNER = 'usergroup.owner'
323 323 USERGROUP_DEFAULT = 'usergroup.default'
324 324
325 325
326 326 class PermOriginDict(dict):
327 327 """
328 328 A special dict used for tracking permissions along with their origins.
329 329
330 330 `__setitem__` has been overridden to expect a tuple(perm, origin)
331 331 `__getitem__` will return only the perm
332 332 `.perm_origin_stack` will return the stack of (perm, origin) set per key
333 333
334 334 >>> perms = PermOriginDict()
335 335 >>> perms['resource'] = 'read', 'default'
336 336 >>> perms['resource']
337 337 'read'
338 338 >>> perms['resource'] = 'write', 'admin'
339 339 >>> perms['resource']
340 340 'write'
341 341 >>> perms.perm_origin_stack
342 342 {'resource': [('read', 'default'), ('write', 'admin')]}
343 343 """
344 344
345 345 def __init__(self, *args, **kw):
346 346 dict.__init__(self, *args, **kw)
347 347 self.perm_origin_stack = {}
348 348
349 349 def __setitem__(self, key, (perm, origin)):
350 350 self.perm_origin_stack.setdefault(key, []).append((perm, origin))
351 351 dict.__setitem__(self, key, perm)
352 352
353 353
354 354 class PermissionCalculator(object):
355 355
356 356 def __init__(
357 357 self, user_id, scope, user_is_admin,
358 358 user_inherit_default_permissions, explicit, algo):
359 359 self.user_id = user_id
360 360 self.user_is_admin = user_is_admin
361 361 self.inherit_default_permissions = user_inherit_default_permissions
362 362 self.explicit = explicit
363 363 self.algo = algo
364 364
365 365 scope = scope or {}
366 366 self.scope_repo_id = scope.get('repo_id')
367 367 self.scope_repo_group_id = scope.get('repo_group_id')
368 368 self.scope_user_group_id = scope.get('user_group_id')
369 369
370 370 self.default_user_id = User.get_default_user(cache=True).user_id
371 371
372 372 self.permissions_repositories = PermOriginDict()
373 373 self.permissions_repository_groups = PermOriginDict()
374 374 self.permissions_user_groups = PermOriginDict()
375 375 self.permissions_global = set()
376 376
377 377 self.default_repo_perms = Permission.get_default_repo_perms(
378 378 self.default_user_id, self.scope_repo_id)
379 379 self.default_repo_groups_perms = Permission.get_default_group_perms(
380 380 self.default_user_id, self.scope_repo_group_id)
381 381 self.default_user_group_perms = \
382 382 Permission.get_default_user_group_perms(
383 383 self.default_user_id, self.scope_user_group_id)
384 384
385 385 def calculate(self):
386 386 if self.user_is_admin:
387 387 return self._admin_permissions()
388 388
389 389 self._calculate_global_default_permissions()
390 390 self._calculate_global_permissions()
391 391 self._calculate_default_permissions()
392 392 self._calculate_repository_permissions()
393 393 self._calculate_repository_group_permissions()
394 394 self._calculate_user_group_permissions()
395 395 return self._permission_structure()
396 396
397 397 def _admin_permissions(self):
398 398 """
399 399 an admin user has all default rights for repositories
400 400 and repository groups set to admin
401 401 """
402 402 self.permissions_global.add('hg.admin')
403 403 self.permissions_global.add('hg.create.write_on_repogroup.true')
404 404
405 405 # repositories
406 406 for perm in self.default_repo_perms:
407 407 r_k = perm.UserRepoToPerm.repository.repo_name
408 408 p = 'repository.admin'
409 409 self.permissions_repositories[r_k] = p, PermOrigin.ADMIN
410 410
411 411 # repository groups
412 412 for perm in self.default_repo_groups_perms:
413 413 rg_k = perm.UserRepoGroupToPerm.group.group_name
414 414 p = 'group.admin'
415 415 self.permissions_repository_groups[rg_k] = p, PermOrigin.ADMIN
416 416
417 417 # user groups
418 418 for perm in self.default_user_group_perms:
419 419 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
420 420 p = 'usergroup.admin'
421 421 self.permissions_user_groups[u_k] = p, PermOrigin.ADMIN
422 422
423 423 return self._permission_structure()
424 424
425 425 def _calculate_global_default_permissions(self):
426 426 """
427 427 global permissions taken from the default user
428 428 """
429 429 default_global_perms = UserToPerm.query()\
430 430 .filter(UserToPerm.user_id == self.default_user_id)\
431 431 .options(joinedload(UserToPerm.permission))
432 432
433 433 for perm in default_global_perms:
434 434 self.permissions_global.add(perm.permission.permission_name)
435 435
436 436 def _calculate_global_permissions(self):
437 437 """
438 438 Set global system permissions with user permissions or permissions
439 439 taken from the user groups of the current user.
440 440
441 441 The permissions include repo creation, repo group creation, forking,
442 442 etc.
443 443 """
444 444
445 445 # now we read the defined permissions and overwrite what we have set
446 446 # before; those can be configured explicitly from groups or users.
447 447
448 448 # TODO: johbo: This seems to be out of sync, find out the reason
449 449 # for the comment below and update it.
450 450
451 451 # In case we want to extend this list we should be always in sync with
452 452 # User.DEFAULT_USER_PERMISSIONS definitions
453 453 _configurable = frozenset([
454 454 'hg.fork.none', 'hg.fork.repository',
455 455 'hg.create.none', 'hg.create.repository',
456 456 'hg.usergroup.create.false', 'hg.usergroup.create.true',
457 457 'hg.repogroup.create.false', 'hg.repogroup.create.true',
458 458 'hg.create.write_on_repogroup.false',
459 459 'hg.create.write_on_repogroup.true',
460 460 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
461 461 ])
462 462
463 463 # USER GROUPS come first: user group global permissions
464 464 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
465 465 .options(joinedload(UserGroupToPerm.permission))\
466 466 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
467 467 UserGroupMember.users_group_id))\
468 468 .filter(UserGroupMember.user_id == self.user_id)\
469 469 .order_by(UserGroupToPerm.users_group_id)\
470 470 .all()
471 471
472 472 # we need to group by user group here since a user can be in more
473 473 # than one group, so we get all groups
474 474 _explicit_grouped_perms = [
475 475 [x, list(y)] for x, y in
476 476 itertools.groupby(user_perms_from_users_groups,
477 477 lambda _x: _x.users_group)]
478 478
479 479 for gr, perms in _explicit_grouped_perms:
480 480 # since user can be in multiple groups iterate over them and
481 481 # select the lowest permissions first (more explicit)
482 482 # TODO: marcink: do this^^
483 483
484 484 # group doesn't inherit default permissions so we actually set them
485 485 if not gr.inherit_default_permissions:
486 486 # NEED TO IGNORE all previously set configurable permissions
487 487 # and replace them with permissions explicitly set from this
488 488 # user group
489 489 self.permissions_global = self.permissions_global.difference(
490 490 _configurable)
491 491 for perm in perms:
492 492 self.permissions_global.add(perm.permission.permission_name)
493 493
494 494 # user explicit global permissions
495 495 user_perms = Session().query(UserToPerm)\
496 496 .options(joinedload(UserToPerm.permission))\
497 497 .filter(UserToPerm.user_id == self.user_id).all()
498 498
499 499 if not self.inherit_default_permissions:
500 500 # NEED TO IGNORE all configurable permissions and
501 501 # replace them with permissions explicitly set for this user
502 502 self.permissions_global = self.permissions_global.difference(
503 503 _configurable)
504 504 for perm in user_perms:
505 505 self.permissions_global.add(perm.permission.permission_name)
506 506
507 507 def _calculate_default_permissions(self):
508 508 """
509 509 Set default user permissions for repositories and repository groups,
510 510 taken from the default user.
511 511
512 512 Calculate inheritance of object permissions based on what we have now
513 513 in GLOBAL permissions. We check if .false is in GLOBAL since this is
514 514 explicitly set. Inherit is the opposite of .false being there.
515 515
516 516 .. note::
517 517
518 518 the syntax is a little bit odd, but what we need to check here is
519 519 the absence of the .false permission in the list, so even for an
520 520 inconsistent state when both .true and .false are there,
521 521 .false is more important
522 522
523 523 """
524 524 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
525 525 in self.permissions_global)
526 526
527 527 # defaults for repositories, taken from `default` user permissions
528 528 # on given repo
529 529 for perm in self.default_repo_perms:
530 530 r_k = perm.UserRepoToPerm.repository.repo_name
531 531 o = PermOrigin.REPO_DEFAULT
532 532 if perm.Repository.private and not (
533 533 perm.Repository.user_id == self.user_id):
534 534 # disable defaults for private repos,
535 535 p = 'repository.none'
536 536 o = PermOrigin.REPO_PRIVATE
537 537 elif perm.Repository.user_id == self.user_id:
538 538 # set admin if owner
539 539 p = 'repository.admin'
540 540 o = PermOrigin.REPO_OWNER
541 541 else:
542 542 p = perm.Permission.permission_name
543 543 # if we decide this user isn't inheriting permissions from
544 544 # default user we set him to .none so only explicit
545 545 # permissions work
546 546 if not user_inherit_object_permissions:
547 547 p = 'repository.none'
548 548 self.permissions_repositories[r_k] = p, o
549 549
550 550 # defaults for repository groups taken from `default` user permission
551 551 # on given group
552 552 for perm in self.default_repo_groups_perms:
553 553 rg_k = perm.UserRepoGroupToPerm.group.group_name
554 554 o = PermOrigin.REPOGROUP_DEFAULT
555 555 if perm.RepoGroup.user_id == self.user_id:
556 556 # set admin if owner
557 557 p = 'group.admin'
558 558 o = PermOrigin.REPOGROUP_OWNER
559 559 else:
560 560 p = perm.Permission.permission_name
561 561
562 562 # if we decide this user isn't inheriting permissions from default
563 563 # user we set him to .none so only explicit permissions work
564 564 if not user_inherit_object_permissions:
565 565 p = 'group.none'
566 566 self.permissions_repository_groups[rg_k] = p, o
567 567
568 568 # defaults for user groups taken from `default` user permission
569 569 # on given user group
570 570 for perm in self.default_user_group_perms:
571 571 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
572 572 o = PermOrigin.USERGROUP_DEFAULT
573 573 if perm.UserGroup.user_id == self.user_id:
574 574 # set admin if owner
575 575 p = 'usergroup.admin'
576 576 o = PermOrigin.USERGROUP_OWNER
577 577 else:
578 578 p = perm.Permission.permission_name
579 579
580 580 # if we decide this user isn't inheriting permissions from default
581 581 # user we set him to .none so only explicit permissions work
582 582 if not user_inherit_object_permissions:
583 583 p = 'usergroup.none'
584 584 self.permissions_user_groups[u_k] = p, o
585 585
586 586 def _calculate_repository_permissions(self):
587 587 """
588 588 Repository permissions for the current user.
589 589
590 590 Check if the user is part of user groups for this repository and
591 591 fill in the permissions from them. `_choose_permission` decides which
592 592 permission should be selected based on the selected method.
593 593 """
594 594
595 595 # user group for repositories permissions
596 596 user_repo_perms_from_user_group = Permission\
597 597 .get_default_repo_perms_from_user_group(
598 598 self.user_id, self.scope_repo_id)
599 599
600 600 multiple_counter = collections.defaultdict(int)
601 601 for perm in user_repo_perms_from_user_group:
602 602 r_k = perm.UserGroupRepoToPerm.repository.repo_name
603 603 ug_k = perm.UserGroupRepoToPerm.users_group.users_group_name
604 604 multiple_counter[r_k] += 1
605 605 p = perm.Permission.permission_name
606 606 o = PermOrigin.REPO_USERGROUP % ug_k
607 607
608 608 if perm.Repository.user_id == self.user_id:
609 609 # set admin if owner
610 610 p = 'repository.admin'
611 611 o = PermOrigin.REPO_OWNER
612 612 else:
613 613 if multiple_counter[r_k] > 1:
614 614 cur_perm = self.permissions_repositories[r_k]
615 615 p = self._choose_permission(p, cur_perm)
616 616 self.permissions_repositories[r_k] = p, o
617 617
618 618 # user explicit permissions for repositories override any specified
619 619 # by the group permission
620 620 user_repo_perms = Permission.get_default_repo_perms(
621 621 self.user_id, self.scope_repo_id)
622 622 for perm in user_repo_perms:
623 623 r_k = perm.UserRepoToPerm.repository.repo_name
624 624 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
625 625 # set admin if owner
626 626 if perm.Repository.user_id == self.user_id:
627 627 p = 'repository.admin'
628 628 o = PermOrigin.REPO_OWNER
629 629 else:
630 630 p = perm.Permission.permission_name
631 631 if not self.explicit:
632 632 cur_perm = self.permissions_repositories.get(
633 633 r_k, 'repository.none')
634 634 p = self._choose_permission(p, cur_perm)
635 635 self.permissions_repositories[r_k] = p, o
636 636
637 637 def _calculate_repository_group_permissions(self):
638 638 """
639 639 Repository group permissions for the current user.
640 640
641 641 Check if the user is part of user groups for repository groups and
642 642 fill in the permissions from them. `_choose_permission` decides which
643 643 permission should be selected based on the selected method.
644 644 """
645 645 # user group for repo groups permissions
646 646 user_repo_group_perms_from_user_group = Permission\
647 647 .get_default_group_perms_from_user_group(
648 648 self.user_id, self.scope_repo_group_id)
649 649
650 650 multiple_counter = collections.defaultdict(int)
651 651 for perm in user_repo_group_perms_from_user_group:
652 652 g_k = perm.UserGroupRepoGroupToPerm.group.group_name
653 653 ug_k = perm.UserGroupRepoGroupToPerm.users_group.users_group_name
654 654 o = PermOrigin.REPOGROUP_USERGROUP % ug_k
655 655 multiple_counter[g_k] += 1
656 656 p = perm.Permission.permission_name
657 657 if perm.RepoGroup.user_id == self.user_id:
658 658 # set admin if owner, even for member of other user group
659 659 p = 'group.admin'
660 660 o = PermOrigin.REPOGROUP_OWNER
661 661 else:
662 662 if multiple_counter[g_k] > 1:
663 663 cur_perm = self.permissions_repository_groups[g_k]
664 664 p = self._choose_permission(p, cur_perm)
665 665 self.permissions_repository_groups[g_k] = p, o
666 666
667 667 # user explicit permissions for repository groups
668 668 user_repo_groups_perms = Permission.get_default_group_perms(
669 669 self.user_id, self.scope_repo_group_id)
670 670 for perm in user_repo_groups_perms:
671 671 rg_k = perm.UserRepoGroupToPerm.group.group_name
672 672 u_k = perm.UserRepoGroupToPerm.user.username
673 673 o = PermOrigin.REPOGROUP_USER % u_k
674 674
675 675 if perm.RepoGroup.user_id == self.user_id:
676 676 # set admin if owner
677 677 p = 'group.admin'
678 678 o = PermOrigin.REPOGROUP_OWNER
679 679 else:
680 680 p = perm.Permission.permission_name
681 681 if not self.explicit:
682 682 cur_perm = self.permissions_repository_groups.get(
683 683 rg_k, 'group.none')
684 684 p = self._choose_permission(p, cur_perm)
685 685 self.permissions_repository_groups[rg_k] = p, o
686 686
687 687 def _calculate_user_group_permissions(self):
688 688 """
689 689 User group permissions for the current user.
690 690 """
691 691 # user group for user group permissions
692 692 user_group_from_user_group = Permission\
693 693 .get_default_user_group_perms_from_user_group(
694 694 self.user_id, self.scope_user_group_id)
695 695
696 696 multiple_counter = collections.defaultdict(int)
697 697 for perm in user_group_from_user_group:
698 698 g_k = perm.UserGroupUserGroupToPerm\
699 699 .target_user_group.users_group_name
700 700 u_k = perm.UserGroupUserGroupToPerm\
701 701 .user_group.users_group_name
702 702 o = PermOrigin.USERGROUP_USERGROUP % u_k
703 703 multiple_counter[g_k] += 1
704 704 p = perm.Permission.permission_name
705 705
706 706 if perm.UserGroup.user_id == self.user_id:
707 707 # set admin if owner, even for member of other user group
708 708 p = 'usergroup.admin'
709 709 o = PermOrigin.USERGROUP_OWNER
710 710 else:
711 711 if multiple_counter[g_k] > 1:
712 712 cur_perm = self.permissions_user_groups[g_k]
713 713 p = self._choose_permission(p, cur_perm)
714 714 self.permissions_user_groups[g_k] = p, o
715 715
716 716 # user explicit permission for user groups
717 717 user_user_groups_perms = Permission.get_default_user_group_perms(
718 718 self.user_id, self.scope_user_group_id)
719 719 for perm in user_user_groups_perms:
720 720 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
721 721 u_k = perm.UserUserGroupToPerm.user.username
722 722 o = PermOrigin.USERGROUP_USER % u_k
723 723
724 724 if perm.UserGroup.user_id == self.user_id:
725 725 # set admin if owner
726 726 p = 'usergroup.admin'
727 727 o = PermOrigin.USERGROUP_OWNER
728 728 else:
729 729 p = perm.Permission.permission_name
730 730 if not self.explicit:
731 731 cur_perm = self.permissions_user_groups.get(
732 732 ug_k, 'usergroup.none')
733 733 p = self._choose_permission(p, cur_perm)
734 734 self.permissions_user_groups[ug_k] = p, o
735 735
736 736 def _choose_permission(self, new_perm, cur_perm):
737 737 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
738 738 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
739 739 if self.algo == 'higherwin':
740 740 if new_perm_val > cur_perm_val:
741 741 return new_perm
742 742 return cur_perm
743 743 elif self.algo == 'lowerwin':
744 744 if new_perm_val < cur_perm_val:
745 745 return new_perm
746 746 return cur_perm
747 747
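# Illustrative sketch, assuming Permission.PERM_WEIGHTS ranks
# repository.none < repository.read < repository.write < repository.admin,
# and given a PermissionCalculator instance `calc` (hypothetical values):
#
#     calc.algo = 'higherwin'
#     calc._choose_permission('repository.write', 'repository.read')
#     # -> 'repository.write'
#     calc.algo = 'lowerwin'
#     calc._choose_permission('repository.write', 'repository.read')
#     # -> 'repository.read'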
748 748 def _permission_structure(self):
749 749 return {
750 750 'global': self.permissions_global,
751 751 'repositories': self.permissions_repositories,
752 752 'repositories_groups': self.permissions_repository_groups,
753 753 'user_groups': self.permissions_user_groups,
754 754 }
755 755
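# Illustrative shape of the structure returned by calculate() and
# _permission_structure(); names and permission values are example data.
# Origins are tracked separately in each PermOriginDict's perm_origin_stack.
#
#     {
#         'global': set(['hg.create.repository', ...]),
#         'repositories': {'some/repo': 'repository.read', ...},
#         'repositories_groups': {'some-group': 'group.write', ...},
#         'user_groups': {'some-user-group': 'usergroup.read', ...},
#     }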
756 756
757 757 def allowed_auth_token_access(controller_name, whitelist=None, auth_token=None):
758 758 """
759 759 Check if the given controller_name is in the whitelist for auth token access
760 760 """
761 761 if not whitelist:
762 762 from rhodecode import CONFIG
763 763 whitelist = aslist(
764 764 CONFIG.get('api_access_controllers_whitelist'), sep=',')
765 765 log.debug(
766 766 'Allowed controllers for AUTH TOKEN access: %s' % (whitelist,))
767 767
768 768 auth_token_access_valid = False
769 769 for entry in whitelist:
770 770 if fnmatch.fnmatch(controller_name, entry):
771 771 auth_token_access_valid = True
772 772 break
773 773
774 774 if auth_token_access_valid:
775 775 log.debug('controller:%s matches entry in whitelist'
776 776 % (controller_name,))
777 777 else:
778 778 msg = ('controller: %s does *NOT* match any entry in whitelist'
779 779 % (controller_name,))
780 780 if auth_token:
781 781 # if we use auth token key and don't have access it's a warning
782 782 log.warning(msg)
783 783 else:
784 784 log.debug(msg)
785 785
786 786 return auth_token_access_valid
787 787
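# Illustrative usage sketch (controller names are made up): whitelist entries
# are fnmatch patterns matched against "Controller:method" names.
#
#     allowed_auth_token_access(
#         'FilesController:raw', whitelist=['FilesController:*'])        # True
#     allowed_auth_token_access(
#         'ChangesetController:index', whitelist=['FilesController:*'])  # False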
788 788
789 789 class AuthUser(object):
790 790 """
791 791 A simple object that handles all attributes of a user in RhodeCode
792 792 
793 793 It does a lookup based on API key, given user, or user present in the
794 794 session, then fills in all required information for such a user. It also
795 795 checks if anonymous access is enabled and, if so, returns the default user as logged in
796 796 """
797 797 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
798 798
799 799 def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
800 800
801 801 self.user_id = user_id
802 802 self._api_key = api_key
803 803
804 804 self.api_key = None
805 805 self.feed_token = ''
806 806 self.username = username
807 807 self.ip_addr = ip_addr
808 808 self.name = ''
809 809 self.lastname = ''
810 810 self.first_name = ''
811 811 self.last_name = ''
812 812 self.email = ''
813 813 self.is_authenticated = False
814 814 self.admin = False
815 815 self.inherit_default_permissions = False
816 816 self.password = ''
817 817
818 818 self.anonymous_user = None # propagated on propagate_data
819 819 self.propagate_data()
820 820 self._instance = None
821 821 self._permissions_scoped_cache = {} # used to bind scoped calculation
822 822
823 823 @LazyProperty
824 824 def permissions(self):
825 825 return self.get_perms(user=self, cache=False)
826 826
827 827 def permissions_with_scope(self, scope):
828 828 """
829 829 Call the get_perms function with scoped data. The scope in that function
830 830 narrows the SQL calls to the given IDs of objects, resulting in fetching
831 831 just the particular permissions we want to obtain. If scope is an empty
832 832 dict then it basically narrows the scope to GLOBAL permissions only.
833 833
834 834 :param scope: dict
835 835 """
836 836 if 'repo_name' in scope:
837 837 obj = Repository.get_by_repo_name(scope['repo_name'])
838 838 if obj:
839 839 scope['repo_id'] = obj.repo_id
840 840 _scope = {
841 841 'repo_id': -1,
842 842 'user_group_id': -1,
843 843 'repo_group_id': -1,
844 844 }
845 845 _scope.update(scope)
846 846 cache_key = "_".join(map(safe_str, reduce(lambda a, b: a+b,
847 847 _scope.items())))
848 848 if cache_key not in self._permissions_scoped_cache:
849 849 # store in cache to mimic how the @LazyProperty works,
850 850 # the difference here is that we use the unique key calculated
851 851 # from params and values
852 852 res = self.get_perms(user=self, cache=False, scope=_scope)
853 853 self._permissions_scoped_cache[cache_key] = res
854 854 return self._permissions_scoped_cache[cache_key]
855 855
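# Illustrative usage sketch (repo name is an example): narrow the permission
# calculation to one repository; scope keys that are not given default to -1.
#
#     perms = auth_user.permissions_with_scope({'repo_name': 'some/repo'})
#     perms['repositories'].get('some/repo')   # e.g. 'repository.read'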
856 856 def get_instance(self):
857 857 return User.get(self.user_id)
858 858
859 859 def update_lastactivity(self):
860 860 if self.user_id:
861 861 User.get(self.user_id).update_lastactivity()
862 862
863 863 def propagate_data(self):
864 864 """
865 865 Fills in user data and propagates values to this instance. Maps fetched
866 866 user attributes to this class instance attributes
867 867 """
868 868 log.debug('starting data propagation for new potential AuthUser')
869 869 user_model = UserModel()
870 870 anon_user = self.anonymous_user = User.get_default_user(cache=True)
871 871 is_user_loaded = False
872 872
873 873 # lookup by userid
874 874 if self.user_id is not None and self.user_id != anon_user.user_id:
875 875 log.debug('Trying Auth User lookup by USER ID: `%s`' % self.user_id)
876 876 is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
877 877
878 878 # try to get user by api key
879 879 elif self._api_key and self._api_key != anon_user.api_key:
880 880 log.debug('Trying Auth User lookup by API KEY: `%s`' % self._api_key)
881 881 is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
882 882
883 883 # lookup by username
884 884 elif self.username:
885 885 log.debug('Trying Auth User lookup by USER NAME: `%s`' % self.username)
886 886 is_user_loaded = user_model.fill_data(self, username=self.username)
887 887 else:
888 888 log.debug('No data in %s that could have been used to log in' % self)
889 889
890 890 if not is_user_loaded:
891 891 log.debug('Failed to load user. Fallback to default user')
892 892 # if we cannot authenticate user try anonymous
893 893 if anon_user.active:
894 894 user_model.fill_data(self, user_id=anon_user.user_id)
895 895 # then we set this user as logged in
896 896 self.is_authenticated = True
897 897 else:
898 898 # in case of disabled anonymous user we reset some of the
899 899 # parameters so such user is "corrupted", skipping the fill_data
900 900 for attr in ['user_id', 'username', 'admin', 'active']:
901 901 setattr(self, attr, None)
902 902 self.is_authenticated = False
903 903
904 904 if not self.username:
905 905 self.username = 'None'
906 906
907 907 log.debug('Auth User is now %s' % self)
908 908
909 909 def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
910 910 cache=False):
911 911 """
912 912 Fills the user permission attribute with permissions taken from the
913 913 database; works for permissions given for repositories, and for
914 914 permissions that are granted to groups.
915 915 
916 916 :param user: instance of User object from database
917 917 :param explicit: In case there are permissions both for a user and a group
918 918 that the user is part of, the explicit flag will define whether the user
919 919 explicitly overrides permissions from the group; if it's False the
920 920 decision is made based on the algo
921 921 :param algo: algorithm to decide which permission should be chosen if
922 922 multiple are defined, e.g. a user in two different groups. It also
923 923 decides, when the explicit flag is turned off, how to pick the permission
924 924 when the user is in a group and also has a separately defined permission
925 925 """
926 926 user_id = user.user_id
927 927 user_is_admin = user.is_admin
928 928
929 929 # inheritance of global permissions like create repo/fork repo etc
930 930 user_inherit_default_permissions = user.inherit_default_permissions
931 931
932 932 log.debug('Computing PERMISSION tree for scope %s' % (scope, ))
933 933 compute = caches.conditional_cache(
934 934 'short_term', 'cache_desc',
935 935 condition=cache, func=_cached_perms_data)
936 936 result = compute(user_id, scope, user_is_admin,
937 937 user_inherit_default_permissions, explicit, algo)
938 938
939 939 result_repr = []
940 940 for k in result:
941 941 result_repr.append((k, len(result[k])))
942 942
943 943 log.debug('PERMISSION tree computed %s' % (result_repr,))
944 944 return result
945 945
946 946 @property
947 947 def is_default(self):
948 948 return self.username == User.DEFAULT_USER
949 949
950 950 @property
951 951 def is_admin(self):
952 952 return self.admin
953 953
954 954 @property
955 955 def is_user_object(self):
956 956 return self.user_id is not None
957 957
958 958 @property
959 959 def repositories_admin(self):
960 960 """
961 961 Returns list of repositories you're an admin of
962 962 """
963 963 return [
964 964 x[0] for x in self.permissions['repositories'].iteritems()
965 965 if x[1] == 'repository.admin']
966 966
967 967 @property
968 968 def repository_groups_admin(self):
969 969 """
970 970 Returns list of repository groups you're an admin of
971 971 """
972 972 return [
973 973 x[0] for x in self.permissions['repositories_groups'].iteritems()
974 974 if x[1] == 'group.admin']
975 975
976 976 @property
977 977 def user_groups_admin(self):
978 978 """
979 979 Returns list of user groups you're an admin of
980 980 """
981 981 return [
982 982 x[0] for x in self.permissions['user_groups'].iteritems()
983 983 if x[1] == 'usergroup.admin']
984 984
985 985 @property
986 986 def ip_allowed(self):
987 987 """
988 988 Checks if the ip_addr used in the constructor is allowed, based on the
989 989 defined list of allowed IP addresses for the user
990 990
991 991 :returns: boolean, True if ip is in allowed ip range
992 992 """
993 993 # check IP
994 994 inherit = self.inherit_default_permissions
995 995 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
996 996 inherit_from_default=inherit)
997 997 @property
998 998 def personal_repo_group(self):
999 999 return RepoGroup.get_user_personal_repo_group(self.user_id)
1000 1000
1001 1001 @classmethod
1002 1002 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1003 1003 allowed_ips = AuthUser.get_allowed_ips(
1004 1004 user_id, cache=True, inherit_from_default=inherit_from_default)
1005 1005 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1006 1006 log.debug('IP:%s is in range of %s' % (ip_addr, allowed_ips))
1007 1007 return True
1008 1008 else:
1009 1009 log.info('Access for IP:%s forbidden, '
1010 1010 'not in %s' % (ip_addr, allowed_ips))
1011 1011 return False
1012 1012
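# Illustrative usage sketch (addresses are examples): allowed IPs are network
# ranges, and an empty rule set falls back to 0.0.0.0/0 and ::/0 (allow all),
# see get_allowed_ips below.
#
#     AuthUser.check_ip_allowed(
#         user_id=2, ip_addr='192.168.1.10', inherit_from_default=True)
#     # -> True if 192.168.1.10 falls into any allowed range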
1013 1013 def __repr__(self):
1014 1014 return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\
1015 1015 % (self.user_id, self.username, self.ip_addr, self.is_authenticated)
1016 1016
1017 1017 def set_authenticated(self, authenticated=True):
1018 1018 if self.user_id != self.anonymous_user.user_id:
1019 1019 self.is_authenticated = authenticated
1020 1020
1021 1021 def get_cookie_store(self):
1022 1022 return {
1023 1023 'username': self.username,
1024 1024 'password': md5(self.password),
1025 1025 'user_id': self.user_id,
1026 1026 'is_authenticated': self.is_authenticated
1027 1027 }
1028 1028
1029 1029 @classmethod
1030 1030 def from_cookie_store(cls, cookie_store):
1031 1031 """
1032 1032 Creates AuthUser from a cookie store
1033 1033
1034 1034 :param cls:
1035 1035 :param cookie_store:
1036 1036 """
1037 1037 user_id = cookie_store.get('user_id')
1038 1038 username = cookie_store.get('username')
1039 1039 api_key = cookie_store.get('api_key')
1040 1040 return AuthUser(user_id, api_key, username)
1041 1041
1042 1042 @classmethod
1043 1043 def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
1044 1044 _set = set()
1045 1045
1046 1046 if inherit_from_default:
1047 1047 default_ips = UserIpMap.query().filter(
1048 1048 UserIpMap.user == User.get_default_user(cache=True))
1049 1049 if cache:
1050 1050 default_ips = default_ips.options(
1051 1051 FromCache("sql_cache_short", "get_user_ips_default"))
1052 1052
1053 1053 # populate from default user
1054 1054 for ip in default_ips:
1055 1055 try:
1056 1056 _set.add(ip.ip_addr)
1057 1057 except ObjectDeletedError:
1058 1058 # since we use heavy caching sometimes it happens that
1059 1059 # we get deleted objects here, we just skip them
1060 1060 pass
1061 1061
1062 1062 user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
1063 1063 if cache:
1064 1064 user_ips = user_ips.options(
1065 1065 FromCache("sql_cache_short", "get_user_ips_%s" % user_id))
1066 1066
1067 1067 for ip in user_ips:
1068 1068 try:
1069 1069 _set.add(ip.ip_addr)
1070 1070 except ObjectDeletedError:
1071 1071 # since we use heavy caching sometimes it happens that we get
1072 1072 # deleted objects here, we just skip them
1073 1073 pass
1074 1074 return _set or set(['0.0.0.0/0', '::/0'])
1075 1075
1076 1076
1077 1077 def set_available_permissions(config):
1078 1078 """
1079 1079 This function will populate the pylons globals with all available
1080 1080 permissions defined in the db. We don't want to check the db each time for
1081 1081 new permissions, since adding a new permission also requires an application
1082 1082 restart, i.e. to decorate new views with the newly created permission
1083 1083
1084 1084 :param config: current pylons config instance
1085 1085
1086 1086 """
1087 1087 log.info('getting information about all available permissions')
1088 1088 try:
1089 1089 sa = meta.Session
1090 1090 all_perms = sa.query(Permission).all()
1091 1091 config['available_permissions'] = [x.permission_name for x in all_perms]
1092 1092 except Exception:
1093 1093 log.error(traceback.format_exc())
1094 1094 finally:
1095 1095 meta.Session.remove()
1096 1096
1097 1097
1098 1098 def get_csrf_token(session=None, force_new=False, save_if_missing=True):
1099 1099 """
1100 1100 Return the current authentication token, creating one if one doesn't
1101 1101 already exist and the save_if_missing flag is present.
1102 1102
1103 1103 :param session: pass in the pylons session, else we use the global ones
1104 1104 :param force_new: force to re-generate the token and store it in session
1105 1105 :param save_if_missing: save the newly generated token if it's missing in
1106 1106 session
1107 1107 """
1108 1108 # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
1109 1109 # from pyramid.csrf import get_csrf_token
1110 1110
1111 1111 if not session:
1112 1112 from pylons import session
1113 1113
1114 1114 if (csrf_token_key not in session and save_if_missing) or force_new:
1115 1115 token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
1116 1116 session[csrf_token_key] = token
1117 1117 if hasattr(session, 'save'):
1118 1118 session.save()
1119 1119 return session.get(csrf_token_key)
1120 1120
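# Illustrative usage sketch: a view obtains (and lazily stores) the token,
# embeds it in a form under csrf_token_key, and the CSRFRequired decorator
# below validates it on the next request.
#
#     token = get_csrf_token(session)
#     # render: <input type="hidden" name="csrf_token" value="..." />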
1121 1121
1122 1122 def get_request(perm_class):
1123 1123 from pyramid.threadlocal import get_current_request
1124 1124 pyramid_request = get_current_request()
1125 1125 if not pyramid_request:
1126 1126 # return global request of pylons in case pyramid isn't available
1127 1127 # NOTE(marcink): this should be removed after migration to pyramid
1128 1128 from pylons import request
1129 1129 return request
1130 1130 return pyramid_request
1131 1131
1132 1132
1133 1133 # CHECK DECORATORS
1134 1134 class CSRFRequired(object):
1135 1135 """
1136 1136 Decorator for authenticating a form
1137 1137
1138 1138 This decorator uses an authorization token stored in the client's
1139 1139 session for prevention of certain Cross-site request forgery (CSRF)
1140 1140 attacks (See
1141 1141 http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
1142 1142 information).
1143 1143
1144 1144 For use with the ``webhelpers.secure_form`` helper functions.
1145 1145
1146 1146 """
1147 1147 def __init__(self, token=csrf_token_key, header='X-CSRF-Token',
1148 1148 except_methods=None):
1149 1149 self.token = token
1150 1150 self.header = header
1151 1151 self.except_methods = except_methods or []
1152 1152
1153 1153 def __call__(self, func):
1154 1154 return get_cython_compat_decorator(self.__wrapper, func)
1155 1155
1156 1156 def _get_csrf(self, _request):
1157 1157 return _request.POST.get(self.token, _request.headers.get(self.header))
1158 1158
1159 1159 def check_csrf(self, _request, cur_token):
1160 1160 supplied_token = self._get_csrf(_request)
1161 1161 return supplied_token and supplied_token == cur_token
1162 1162
1163 1163 def _get_request(self):
1164 1164 return get_request(self)
1165 1165
1166 1166 def __wrapper(self, func, *fargs, **fkwargs):
1167 1167 request = self._get_request()
1168 1168
1169 1169 if request.method in self.except_methods:
1170 1170 return func(*fargs, **fkwargs)
1171 1171
1172 1172 cur_token = get_csrf_token(save_if_missing=False)
1173 1173 if self.check_csrf(request, cur_token):
1174 1174 if request.POST.get(self.token):
1175 1175 del request.POST[self.token]
1176 1176 return func(*fargs, **fkwargs)
1177 1177 else:
1178 1178 reason = 'token-missing'
1179 1179 supplied_token = self._get_csrf(request)
1180 1180 if supplied_token and cur_token != supplied_token:
1181 1181 reason = 'token-mismatch [%s:%s]' % (
1182 1182 (cur_token or '')[:6], (supplied_token or '')[:6])
1183 1183
1184 1184 csrf_message = \
1185 1185 ("Cross-site request forgery detected, request denied. See "
1186 1186 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
1187 1187 "more information.")
1188 1188 log.warn('Cross-site request forgery detected, request %r DENIED: %s '
1189 1189 'REMOTE_ADDR:%s, HEADERS:%s' % (
1190 1190 request, reason, request.remote_addr, request.headers))
1191 1191
1192 1192 raise HTTPForbidden(explanation=csrf_message)
1193 1193
1194 1194
1195 1195 class LoginRequired(object):
1196 1196 """
1197 1197 Must be logged in to execute this function else
1198 1198 redirect to login page
1199 1199
1200 1200 :param auth_token_access: if enabled this checks only for a valid auth
1201 1201 token and grants access based on a valid token
1202 1202 """
1203 1203 def __init__(self, auth_token_access=None):
1204 1204 self.auth_token_access = auth_token_access
1205 1205
1206 1206 def __call__(self, func):
1207 1207 return get_cython_compat_decorator(self.__wrapper, func)
1208 1208
1209 1209 def _get_request(self):
1210 1210 return get_request(self)
1211 1211
1212 1212 def __wrapper(self, func, *fargs, **fkwargs):
1213 1213 from rhodecode.lib import helpers as h
1214 1214 cls = fargs[0]
1215 1215 user = cls._rhodecode_user
1216 1216 request = self._get_request()
1217 1217
1218 1218 loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
1219 1219 log.debug('Starting login restriction checks for user: %s' % (user,))
1220 1220 # check if our IP is allowed
1221 1221 ip_access_valid = True
1222 1222 if not user.ip_allowed:
1223 1223 h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))),
1224 1224 category='warning')
1225 1225 ip_access_valid = False
1226 1226
1227 1227 # check if we used an APIKEY and it's a valid one
1228 1228 # against the defined whitelist of controllers for which API access is enabled
1229 1229 _auth_token = request.GET.get(
1230 1230 'auth_token', '') or request.GET.get('api_key', '')
1231 1231 auth_token_access_valid = allowed_auth_token_access(
1232 1232 loc, auth_token=_auth_token)
1233 1233
1234 1234 # explicit controller is enabled or API is in our whitelist
1235 1235 if self.auth_token_access or auth_token_access_valid:
1236 1236 log.debug('Checking AUTH TOKEN access for %s' % (cls,))
1237 1237 db_user = user.get_instance()
1238 1238
1239 1239 if db_user:
1240 1240 if self.auth_token_access:
1241 1241 roles = self.auth_token_access
1242 1242 else:
1243 1243 roles = [UserApiKeys.ROLE_HTTP]
1244 1244 token_match = db_user.authenticate_by_token(
1245 1245 _auth_token, roles=roles)
1246 1246 else:
1247 1247 log.debug('Unable to fetch db instance for auth user: %s', user)
1248 1248 token_match = False
1249 1249
1250 1250 if _auth_token and token_match:
1251 1251 auth_token_access_valid = True
1252 1252 log.debug('AUTH TOKEN ****%s is VALID' % (_auth_token[-4:],))
1253 1253 else:
1254 1254 auth_token_access_valid = False
1255 1255 if not _auth_token:
1256 1256 log.debug("AUTH TOKEN *NOT* present in request")
1257 1257 else:
1258 1258 log.warning(
1259 1259 "AUTH TOKEN ****%s *NOT* valid" % _auth_token[-4:])
1260 1260
1261 1261 log.debug('Checking if %s is authenticated @ %s' % (user.username, loc))
1262 1262 reason = 'RHODECODE_AUTH' if user.is_authenticated \
1263 1263 else 'AUTH_TOKEN_AUTH'
1264 1264
1265 1265 if ip_access_valid and (
1266 1266 user.is_authenticated or auth_token_access_valid):
1267 1267 log.info(
1268 1268 'user %s authenticating with:%s IS authenticated on func %s'
1269 1269 % (user, reason, loc))
1270 1270
1271 1271 # update user data to check last activity
1272 1272 user.update_lastactivity()
1273 1273 Session().commit()
1274 1274 return func(*fargs, **fkwargs)
1275 1275 else:
1276 1276 log.warning(
1277 1277 'user %s authenticating with:%s NOT authenticated on '
1278 1278 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s'
1279 1279 % (user, reason, loc, ip_access_valid,
1280 1280 auth_token_access_valid))
1281 1281 # we preserve the GET params
1282 1282 came_from = request.path_qs
1283 1283 log.debug('redirecting to login page with %s' % (came_from,))
1284 1284 raise HTTPFound(
1285 1285 h.route_path('login', _query={'came_from': came_from}))
1286 1286
1287 1287
1288 1288 class NotAnonymous(object):
1289 1289 """
1290 1290 Must be logged in to execute this function else
1291 1291 redirect to login page
1292 1292 """
1293 1293
1294 1294 def __call__(self, func):
1295 1295 return get_cython_compat_decorator(self.__wrapper, func)
1296 1296
1297 1297 def _get_request(self):
1298 1298 return get_request(self)
1299 1299
1300 1300 def __wrapper(self, func, *fargs, **fkwargs):
1301 1301 import rhodecode.lib.helpers as h
1302 1302 cls = fargs[0]
1303 1303 self.user = cls._rhodecode_user
1304 1304 request = self._get_request()
1305 1305
1306 1306 log.debug('Checking if user is not anonymous @%s' % cls)
1307 1307
1308 1308 anonymous = self.user.username == User.DEFAULT_USER
1309 1309
1310 1310 if anonymous:
1311 1311 came_from = request.path_qs
1312 1312 h.flash(_('You need to be a registered user to '
1313 1313 'perform this action'),
1314 1314 category='warning')
1315 1315 raise HTTPFound(
1316 1316 h.route_path('login', _query={'came_from': came_from}))
1317 1317 else:
1318 1318 return func(*fargs, **fkwargs)
1319 1319
1320 1320
1321 1321 class XHRRequired(object):
1322 1322 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1323 1323
1324 1324 def __call__(self, func):
1325 1325 return get_cython_compat_decorator(self.__wrapper, func)
1326 1326
1327 1327 def _get_request(self):
1328 1328 return get_request(self)
1329 1329
1330 1330 def __wrapper(self, func, *fargs, **fkwargs):
1331 1331 from pylons.controllers.util import abort
1332 1332 request = self._get_request()
1333 1333
1334 1334 log.debug('Checking if request is XMLHttpRequest (XHR)')
1335 1335 xhr_message = 'This is not a valid XMLHttpRequest (XHR) request'
1336 1336
1337 1337 if not request.is_xhr:
1338 1338 abort(400, detail=xhr_message)
1339 1339
1340 1340 return func(*fargs, **fkwargs)
1341 1341
1342 1342
1343 1343 class HasAcceptedRepoType(object):
1344 1344 """
1345 1345 Check if requested repo is within given repo type aliases
1346 1346 """
1347 1347
1348 1348 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1349 1349
1350 1350 def __init__(self, *repo_type_list):
1351 1351 self.repo_type_list = set(repo_type_list)
1352 1352
1353 1353 def __call__(self, func):
1354 1354 return get_cython_compat_decorator(self.__wrapper, func)
1355 1355
1356 1356 def __wrapper(self, func, *fargs, **fkwargs):
1357 1357 import rhodecode.lib.helpers as h
1358 1358 cls = fargs[0]
1359 1359 rhodecode_repo = cls.rhodecode_repo
1360 1360
1361 1361 log.debug('%s checking repo type for %s in %s',
1362 1362 self.__class__.__name__,
1363 1363 rhodecode_repo.alias, self.repo_type_list)
1364 1364
1365 1365 if rhodecode_repo.alias in self.repo_type_list:
1366 1366 return func(*fargs, **fkwargs)
1367 1367 else:
1368 1368 h.flash(h.literal(
1369 1369 _('Action not supported for %s.' % rhodecode_repo.alias)),
1370 1370 category='warning')
1371 1371 raise HTTPFound(
1372 1372 h.route_path('repo_summary',
1373 1373 repo_name=cls.rhodecode_db_repo.repo_name))
1374 1374
1375 1375
1376 1376 class PermsDecorator(object):
1377 1377 """
1378 1378 Base class for controller decorators; we extract the current user from
1379 1379 the class itself, which has it stored in the base controllers
1380 1380 """
1381 1381
1382 1382 def __init__(self, *required_perms):
1383 1383 self.required_perms = set(required_perms)
1384 1384
1385 1385 def __call__(self, func):
1386 1386 return get_cython_compat_decorator(self.__wrapper, func)
1387 1387
1388 1388 def _get_request(self):
1389 1389 return get_request(self)
1390 1390
1391 1391 def _get_came_from(self):
1392 1392 _request = self._get_request()
1393 1393
1394 1394 # both pylons/pyramid has this attribute
1395 1395 return _request.path_qs
1396 1396
1397 1397 def __wrapper(self, func, *fargs, **fkwargs):
1398 1398 import rhodecode.lib.helpers as h
1399 1399 cls = fargs[0]
1400 1400 _user = cls._rhodecode_user
1401 1401
1402 1402 log.debug('checking %s permissions %s for %s %s',
1403 1403 self.__class__.__name__, self.required_perms, cls, _user)
1404 1404
1405 1405 if self.check_permissions(_user):
1406 1406 log.debug('Permission granted for %s %s', cls, _user)
1407 1407 return func(*fargs, **fkwargs)
1408 1408
1409 1409 else:
1410 1410 log.debug('Permission denied for %s %s', cls, _user)
1411 1411 anonymous = _user.username == User.DEFAULT_USER
1412 1412
1413 1413 if anonymous:
1414 1414 came_from = self._get_came_from()
1415 1415 h.flash(_('You need to be signed in to view this page'),
1416 1416 category='warning')
1417 1417 raise HTTPFound(
1418 1418 h.route_path('login', _query={'came_from': came_from}))
1419 1419
1420 1420 else:
1421 1421 # redirect with 404 to prevent resource discovery
1422 1422 raise HTTPNotFound()
1423 1423
1424 1424 def check_permissions(self, user):
1425 1425 """Dummy function for overriding"""
1426 1426 raise NotImplementedError(
1427 1427 'You have to write this function in child class')
1428 1428
1429 1429
1430 1430 class HasPermissionAllDecorator(PermsDecorator):
1431 1431 """
1432 1432 Checks for access permission for all given predicates. All of them
1433 1433 have to be met in order to fulfill the request
1434 1434 """
1435 1435
1436 1436 def check_permissions(self, user):
1437 1437 perms = user.permissions_with_scope({})
1438 1438 if self.required_perms.issubset(perms['global']):
1439 1439 return True
1440 1440 return False
1441 1441
1442 1442
1443 1443 class HasPermissionAnyDecorator(PermsDecorator):
1444 1444 """
1445 1445 Checks for access permission for any of given predicates. In order to
1446 1446 fulfill the request any of the predicates must be met
1447 1447 """
1448 1448
1449 1449 def check_permissions(self, user):
1450 1450 perms = user.permissions_with_scope({})
1451 1451 if self.required_perms.intersection(perms['global']):
1452 1452 return True
1453 1453 return False
1454 1454
1455 1455
1456 1456 class HasRepoPermissionAllDecorator(PermsDecorator):
1457 1457 """
1458 1458 Checks for access permission for all given predicates for specific
1459 1459 repository. All of them have to be met in order to fulfill the request
1460 1460 """
1461 1461 def _get_repo_name(self):
1462 1462 _request = self._get_request()
1463 1463 return get_repo_slug(_request)
1464 1464
1465 1465 def check_permissions(self, user):
1466 1466 perms = user.permissions
1467 1467 repo_name = self._get_repo_name()
1468 1468
1469 1469 try:
1470 1470 user_perms = set([perms['repositories'][repo_name]])
1471 1471 except KeyError:
1472 1472 log.debug('cannot locate repo with name: `%s` in permissions defs',
1473 1473 repo_name)
1474 1474 return False
1475 1475
1476 1476 log.debug('checking `%s` permissions for repo `%s`',
1477 1477 user_perms, repo_name)
1478 1478 if self.required_perms.issubset(user_perms):
1479 1479 return True
1480 1480 return False
1481 1481
1482 1482
1483 1483 class HasRepoPermissionAnyDecorator(PermsDecorator):
1484 1484 """
1485 1485 Checks for access permission for any of given predicates for specific
1486 1486 repository. In order to fulfill the request any of the predicates must be met
1487 1487 """
1488 1488 def _get_repo_name(self):
1489 1489 _request = self._get_request()
1490 1490 return get_repo_slug(_request)
1491 1491
1492 1492 def check_permissions(self, user):
1493 1493 perms = user.permissions
1494 1494 repo_name = self._get_repo_name()
1495 1495
1496 1496 try:
1497 1497 user_perms = set([perms['repositories'][repo_name]])
1498 1498 except KeyError:
1499 1499 log.debug('cannot locate repo with name: `%s` in permissions defs',
1500 1500 repo_name)
1501 1501 return False
1502 1502
1503 1503 log.debug('checking `%s` permissions for repo `%s`',
1504 1504 user_perms, repo_name)
1505 1505 if self.required_perms.intersection(user_perms):
1506 1506 return True
1507 1507 return False
1508 1508
1509 1509
1510 1510 class HasRepoGroupPermissionAllDecorator(PermsDecorator):
1511 1511 """
1512 1512 Checks for access permission for all given predicates for specific
1513 1513 repository group. All of them have to be met in order to
1514 1514 fulfill the request
1515 1515 """
1516 1516 def _get_repo_group_name(self):
1517 1517 _request = self._get_request()
1518 1518 return get_repo_group_slug(_request)
1519 1519
1520 1520 def check_permissions(self, user):
1521 1521 perms = user.permissions
1522 1522 group_name = self._get_repo_group_name()
1523 1523 try:
1524 1524 user_perms = set([perms['repositories_groups'][group_name]])
1525 1525 except KeyError:
1526 1526 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1527 1527 group_name)
1528 1528 return False
1529 1529
1530 1530 log.debug('checking `%s` permissions for repo group `%s`',
1531 1531 user_perms, group_name)
1532 1532 if self.required_perms.issubset(user_perms):
1533 1533 return True
1534 1534 return False
1535 1535
1536 1536
1537 1537 class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
1538 1538 """
1539 1539 Checks for access permission for any of given predicates for specific
1540 1540 repository group. In order to fulfill the request any
1541 1541 of the predicates must be met
1542 1542 """
1543 1543 def _get_repo_group_name(self):
1544 1544 _request = self._get_request()
1545 1545 return get_repo_group_slug(_request)
1546 1546
1547 1547 def check_permissions(self, user):
1548 1548 perms = user.permissions
1549 1549 group_name = self._get_repo_group_name()
1550 1550
1551 1551 try:
1552 1552 user_perms = set([perms['repositories_groups'][group_name]])
1553 1553 except KeyError:
1554 1554 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1555 1555 group_name)
1556 1556 return False
1557 1557
1558 1558 log.debug('checking `%s` permissions for repo group `%s`',
1559 1559 user_perms, group_name)
1560 1560 if self.required_perms.intersection(user_perms):
1561 1561 return True
1562 1562 return False
1563 1563
1564 1564
1565 1565 class HasUserGroupPermissionAllDecorator(PermsDecorator):
1566 1566 """
1567 1567 Checks for access permission for all given predicates for specific
1568 1568 user group. All of them have to be met in order to fulfill the request
1569 1569 """
1570 1570 def _get_user_group_name(self):
1571 1571 _request = self._get_request()
1572 1572 return get_user_group_slug(_request)
1573 1573
1574 1574 def check_permissions(self, user):
1575 1575 perms = user.permissions
1576 1576 group_name = self._get_user_group_name()
1577 1577 try:
1578 1578 user_perms = set([perms['user_groups'][group_name]])
1579 1579 except KeyError:
1580 1580 return False
1581 1581
1582 1582 if self.required_perms.issubset(user_perms):
1583 1583 return True
1584 1584 return False
1585 1585
1586 1586
1587 1587 class HasUserGroupPermissionAnyDecorator(PermsDecorator):
1588 1588 """
1589 1589 Checks for access permission for any of given predicates for specific
1590 1590 user group. In order to fulfill the request any of the predicates must be met
1591 1591 """
1592 1592 def _get_user_group_name(self):
1593 1593 _request = self._get_request()
1594 1594 return get_user_group_slug(_request)
1595 1595
1596 1596 def check_permissions(self, user):
1597 1597 perms = user.permissions
1598 1598 group_name = self._get_user_group_name()
1599 1599 try:
1600 1600 user_perms = set([perms['user_groups'][group_name]])
1601 1601 except KeyError:
1602 1602 return False
1603 1603
1604 1604 if self.required_perms.intersection(user_perms):
1605 1605 return True
1606 1606 return False
1607 1607
1608 1608
1609 1609 # CHECK FUNCTIONS
1610 1610 class PermsFunction(object):
1611 1611 """Base function for other check functions"""
1612 1612
1613 1613 def __init__(self, *perms):
1614 1614 self.required_perms = set(perms)
1615 1615 self.repo_name = None
1616 1616 self.repo_group_name = None
1617 1617 self.user_group_name = None
1618 1618
1619 1619 def __bool__(self):
1620 1620 frame = inspect.currentframe()
1621 1621 stack_trace = traceback.format_stack(frame)
1622 1622 log.error('Checking bool value on a class instance of perm '
1623 1623 'function is not allowed: %s' % ''.join(stack_trace))
1624 1624 # rather than throwing errors, here we always return False so if by
1625 1625 # accident someone checks truth for just an instance it will always end
1626 1626 # up in returning False
1627 1627 return False
1628 1628 __nonzero__ = __bool__
1629 1629
1630 1630 def __call__(self, check_location='', user=None):
1631 1631 if not user:
1632 1632 log.debug('Using user attribute from global request')
1633 1633             # TODO: remove this someday, pass user as an attribute here
1634 1634 request = self._get_request()
1635 1635 user = request.user
1636 1636
1637 1637 # init auth user if not already given
1638 1638 if not isinstance(user, AuthUser):
1639 1639 log.debug('Wrapping user %s into AuthUser', user)
1640 1640 user = AuthUser(user.user_id)
1641 1641
1642 1642 cls_name = self.__class__.__name__
1643 1643 check_scope = self._get_check_scope(cls_name)
1644 1644 check_location = check_location or 'unspecified location'
1645 1645
1646 1646 log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
1647 1647 self.required_perms, user, check_scope, check_location)
1648 1648 if not user:
1649 1649 log.warning('Empty user given for permission check')
1650 1650 return False
1651 1651
1652 1652 if self.check_permissions(user):
1653 1653 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1654 1654 check_scope, user, check_location)
1655 1655 return True
1656 1656
1657 1657 else:
1658 1658 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1659 1659 check_scope, user, check_location)
1660 1660 return False
1661 1661
1662 1662 def _get_request(self):
1663 1663 return get_request(self)
1664 1664
1665 1665 def _get_check_scope(self, cls_name):
1666 1666 return {
1667 1667 'HasPermissionAll': 'GLOBAL',
1668 1668 'HasPermissionAny': 'GLOBAL',
1669 1669 'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
1670 1670 'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
1671 1671 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
1672 1672 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
1673 1673 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
1674 1674 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
1675 1675 }.get(cls_name, '?:%s' % cls_name)
1676 1676
1677 1677 def check_permissions(self, user):
1678 1678 """Dummy function for overriding"""
1679 1679 raise Exception('You have to write this function in child class')
1680 1680
1681 1681
1682 1682 class HasPermissionAll(PermsFunction):
1683 1683 def check_permissions(self, user):
1684 1684 perms = user.permissions_with_scope({})
1685 1685 if self.required_perms.issubset(perms.get('global')):
1686 1686 return True
1687 1687 return False
1688 1688
1689 1689
1690 1690 class HasPermissionAny(PermsFunction):
1691 1691 def check_permissions(self, user):
1692 1692 perms = user.permissions_with_scope({})
1693 1693 if self.required_perms.intersection(perms.get('global')):
1694 1694 return True
1695 1695 return False
1696 1696
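
The check functions above are instantiated with the required permission names and then called; a small sketch, assuming a global permission name such as 'hg.admin' and an already resolved `auth_user`:

    # hedged sketch: global permission check; 'hg.admin' is an assumed example
    checker = HasPermissionAny('hg.admin')
    if checker('admin panel check', user=auth_user):
        pass  # user holds the global permission
    # note: truth-testing the bare instance (e.g. `if checker:`) is guarded by
    # __bool__ above and always evaluates to False
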
1697 1697
1698 1698 class HasRepoPermissionAll(PermsFunction):
1699 1699 def __call__(self, repo_name=None, check_location='', user=None):
1700 1700 self.repo_name = repo_name
1701 1701 return super(HasRepoPermissionAll, self).__call__(check_location, user)
1702 1702
1703 1703 def _get_repo_name(self):
1704 1704 if not self.repo_name:
1705 1705 _request = self._get_request()
1706 1706 self.repo_name = get_repo_slug(_request)
1707 1707 return self.repo_name
1708 1708
1709 1709 def check_permissions(self, user):
1710 1710 self.repo_name = self._get_repo_name()
1711 1711 perms = user.permissions
1712 1712 try:
1713 1713 user_perms = set([perms['repositories'][self.repo_name]])
1714 1714 except KeyError:
1715 1715 return False
1716 1716 if self.required_perms.issubset(user_perms):
1717 1717 return True
1718 1718 return False
1719 1719
1720 1720
1721 1721 class HasRepoPermissionAny(PermsFunction):
1722 1722 def __call__(self, repo_name=None, check_location='', user=None):
1723 1723 self.repo_name = repo_name
1724 1724 return super(HasRepoPermissionAny, self).__call__(check_location, user)
1725 1725
1726 1726 def _get_repo_name(self):
1727 1727 if not self.repo_name:
1728 1728 _request = self._get_request()
1729 1729 self.repo_name = get_repo_slug(_request)
1730 1730 return self.repo_name
1731 1731
1732 1732 def check_permissions(self, user):
1733 1733 self.repo_name = self._get_repo_name()
1734 1734 perms = user.permissions
1735 1735 try:
1736 1736 user_perms = set([perms['repositories'][self.repo_name]])
1737 1737 except KeyError:
1738 1738 return False
1739 1739 if self.required_perms.intersection(user_perms):
1740 1740 return True
1741 1741 return False
1742 1742
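
The repository-scoped variants additionally take the repository name; a sketch with assumed permission names and repository:

    # hedged sketch: repository-scoped check; names below are assumed examples
    has_access = HasRepoPermissionAny('repository.read', 'repository.write')(
        repo_name='group/some-repo', check_location='changeset view', user=auth_user)
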
1743 1743
1744 1744 class HasRepoGroupPermissionAny(PermsFunction):
1745 1745 def __call__(self, group_name=None, check_location='', user=None):
1746 1746 self.repo_group_name = group_name
1747 1747 return super(HasRepoGroupPermissionAny, self).__call__(
1748 1748 check_location, user)
1749 1749
1750 1750 def check_permissions(self, user):
1751 1751 perms = user.permissions
1752 1752 try:
1753 1753 user_perms = set(
1754 1754 [perms['repositories_groups'][self.repo_group_name]])
1755 1755 except KeyError:
1756 1756 return False
1757 1757 if self.required_perms.intersection(user_perms):
1758 1758 return True
1759 1759 return False
1760 1760
1761 1761
1762 1762 class HasRepoGroupPermissionAll(PermsFunction):
1763 1763 def __call__(self, group_name=None, check_location='', user=None):
1764 1764 self.repo_group_name = group_name
1765 1765 return super(HasRepoGroupPermissionAll, self).__call__(
1766 1766 check_location, user)
1767 1767
1768 1768 def check_permissions(self, user):
1769 1769 perms = user.permissions
1770 1770 try:
1771 1771 user_perms = set(
1772 1772 [perms['repositories_groups'][self.repo_group_name]])
1773 1773 except KeyError:
1774 1774 return False
1775 1775 if self.required_perms.issubset(user_perms):
1776 1776 return True
1777 1777 return False
1778 1778
1779 1779
1780 1780 class HasUserGroupPermissionAny(PermsFunction):
1781 1781 def __call__(self, user_group_name=None, check_location='', user=None):
1782 1782 self.user_group_name = user_group_name
1783 1783 return super(HasUserGroupPermissionAny, self).__call__(
1784 1784 check_location, user)
1785 1785
1786 1786 def check_permissions(self, user):
1787 1787 perms = user.permissions
1788 1788 try:
1789 1789 user_perms = set([perms['user_groups'][self.user_group_name]])
1790 1790 except KeyError:
1791 1791 return False
1792 1792 if self.required_perms.intersection(user_perms):
1793 1793 return True
1794 1794 return False
1795 1795
1796 1796
1797 1797 class HasUserGroupPermissionAll(PermsFunction):
1798 1798 def __call__(self, user_group_name=None, check_location='', user=None):
1799 1799 self.user_group_name = user_group_name
1800 1800 return super(HasUserGroupPermissionAll, self).__call__(
1801 1801 check_location, user)
1802 1802
1803 1803 def check_permissions(self, user):
1804 1804 perms = user.permissions
1805 1805 try:
1806 1806 user_perms = set([perms['user_groups'][self.user_group_name]])
1807 1807 except KeyError:
1808 1808 return False
1809 1809 if self.required_perms.issubset(user_perms):
1810 1810 return True
1811 1811 return False
1812 1812
1813 1813
1814 1814 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
1815 1815 class HasPermissionAnyMiddleware(object):
1816 1816 def __init__(self, *perms):
1817 1817 self.required_perms = set(perms)
1818 1818
1819 1819 def __call__(self, user, repo_name):
1820 1820 # repo_name MUST be unicode, since we handle keys in permission
1821 1821 # dict by unicode
1822 1822 repo_name = safe_unicode(repo_name)
1823 1823 user = AuthUser(user.user_id)
1824 1824 log.debug(
1825 1825 'Checking VCS protocol permissions %s for user:%s repo:`%s`',
1826 1826 self.required_perms, user, repo_name)
1827 1827
1828 1828 if self.check_permissions(user, repo_name):
1829 1829 log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
1830 1830 repo_name, user, 'PermissionMiddleware')
1831 1831 return True
1832 1832
1833 1833 else:
1834 1834 log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
1835 1835 repo_name, user, 'PermissionMiddleware')
1836 1836 return False
1837 1837
1838 1838 def check_permissions(self, user, repo_name):
1839 1839 perms = user.permissions_with_scope({'repo_name': repo_name})
1840 1840
1841 1841 try:
1842 1842 user_perms = set([perms['repositories'][repo_name]])
1843 1843 except Exception:
1844 1844 log.exception('Error while accessing user permissions')
1845 1845 return False
1846 1846
1847 1847 if self.required_perms.intersection(user_perms):
1848 1848 return True
1849 1849 return False
1850 1850
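
The middleware variant above is called with a user object and a repository name while serving VCS (push/pull) requests; a sketch with assumed permission names:

    # hedged sketch: VCS middleware permission check; permission names are assumed
    vcs_perm_check = HasPermissionAnyMiddleware(
        'repository.read', 'repository.write', 'repository.admin')
    allowed = vcs_perm_check(user, repo_name)  # user needs a user_id attribute
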
1851 1851
1852 1852 # SPECIAL VERSION TO HANDLE API AUTH
1853 1853 class _BaseApiPerm(object):
1854 1854 def __init__(self, *perms):
1855 1855 self.required_perms = set(perms)
1856 1856
1857 1857 def __call__(self, check_location=None, user=None, repo_name=None,
1858 1858 group_name=None, user_group_name=None):
1859 1859 cls_name = self.__class__.__name__
1860 1860 check_scope = 'global:%s' % (self.required_perms,)
1861 1861 if repo_name:
1862 1862 check_scope += ', repo_name:%s' % (repo_name,)
1863 1863
1864 1864 if group_name:
1865 1865 check_scope += ', repo_group_name:%s' % (group_name,)
1866 1866
1867 1867 if user_group_name:
1868 1868 check_scope += ', user_group_name:%s' % (user_group_name,)
1869 1869
1870 1870 log.debug(
1871 1871 'checking cls:%s %s %s @ %s'
1872 1872 % (cls_name, self.required_perms, check_scope, check_location))
1873 1873 if not user:
1874 1874 log.debug('Empty User passed into arguments')
1875 1875 return False
1876 1876
1877 1877 # process user
1878 1878 if not isinstance(user, AuthUser):
1879 1879 user = AuthUser(user.user_id)
1880 1880 if not check_location:
1881 1881 check_location = 'unspecified'
1882 1882 if self.check_permissions(user.permissions, repo_name, group_name,
1883 1883 user_group_name):
1884 1884 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1885 1885 check_scope, user, check_location)
1886 1886 return True
1887 1887
1888 1888 else:
1889 1889 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1890 1890 check_scope, user, check_location)
1891 1891 return False
1892 1892
1893 1893 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1894 1894 user_group_name=None):
1895 1895 """
1896 1896         Implement in a child class; should return True if permissions are ok,
1897 1897         False otherwise
1898 1898
1899 1899 :param perm_defs: dict with permission definitions
1900 1900 :param repo_name: repo name
1901 1901 """
1902 1902 raise NotImplementedError()
1903 1903
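
The API permission classes that follow share the `_BaseApiPerm.__call__` convention above; a sketch, with an assumed permission name and an `apiuser` resolved elsewhere:

    # hedged sketch: API-level repository permission check; values are assumed examples
    ok = HasRepoPermissionAnyApi('repository.read')(
        user=apiuser, repo_name='group/some-repo', check_location='api call')
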
1904 1904
1905 1905 class HasPermissionAllApi(_BaseApiPerm):
1906 1906 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1907 1907 user_group_name=None):
1908 1908 if self.required_perms.issubset(perm_defs.get('global')):
1909 1909 return True
1910 1910 return False
1911 1911
1912 1912
1913 1913 class HasPermissionAnyApi(_BaseApiPerm):
1914 1914 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1915 1915 user_group_name=None):
1916 1916 if self.required_perms.intersection(perm_defs.get('global')):
1917 1917 return True
1918 1918 return False
1919 1919
1920 1920
1921 1921 class HasRepoPermissionAllApi(_BaseApiPerm):
1922 1922 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1923 1923 user_group_name=None):
1924 1924 try:
1925 1925 _user_perms = set([perm_defs['repositories'][repo_name]])
1926 1926 except KeyError:
1927 1927 log.warning(traceback.format_exc())
1928 1928 return False
1929 1929 if self.required_perms.issubset(_user_perms):
1930 1930 return True
1931 1931 return False
1932 1932
1933 1933
1934 1934 class HasRepoPermissionAnyApi(_BaseApiPerm):
1935 1935 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1936 1936 user_group_name=None):
1937 1937 try:
1938 1938 _user_perms = set([perm_defs['repositories'][repo_name]])
1939 1939 except KeyError:
1940 1940 log.warning(traceback.format_exc())
1941 1941 return False
1942 1942 if self.required_perms.intersection(_user_perms):
1943 1943 return True
1944 1944 return False
1945 1945
1946 1946
1947 1947 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
1948 1948 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1949 1949 user_group_name=None):
1950 1950 try:
1951 1951 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1952 1952 except KeyError:
1953 1953 log.warning(traceback.format_exc())
1954 1954 return False
1955 1955 if self.required_perms.intersection(_user_perms):
1956 1956 return True
1957 1957 return False
1958 1958
1959 1959
1960 1960 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
1961 1961 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1962 1962 user_group_name=None):
1963 1963 try:
1964 1964 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1965 1965 except KeyError:
1966 1966 log.warning(traceback.format_exc())
1967 1967 return False
1968 1968 if self.required_perms.issubset(_user_perms):
1969 1969 return True
1970 1970 return False
1971 1971
1972 1972
1973 1973 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
1974 1974 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1975 1975 user_group_name=None):
1976 1976 try:
1977 1977 _user_perms = set([perm_defs['user_groups'][user_group_name]])
1978 1978 except KeyError:
1979 1979 log.warning(traceback.format_exc())
1980 1980 return False
1981 1981 if self.required_perms.intersection(_user_perms):
1982 1982 return True
1983 1983 return False
1984 1984
1985 1985
1986 1986 def check_ip_access(source_ip, allowed_ips=None):
1987 1987 """
1988 1988     Checks if source_ip falls within any of the allowed_ips networks.
1989 1989
1990 1990 :param source_ip:
1991 1991 :param allowed_ips: list of allowed ips together with mask
1992 1992 """
1993 1993 log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips))
1994 source_ip_address = ipaddress.ip_address(source_ip)
1994 source_ip_address = ipaddress.ip_address(safe_unicode(source_ip))
1995 1995 if isinstance(allowed_ips, (tuple, list, set)):
1996 1996 for ip in allowed_ips:
1997 ip = safe_unicode(ip)
1997 1998 try:
1998 1999 network_address = ipaddress.ip_network(ip, strict=False)
1999 2000 if source_ip_address in network_address:
2000 2001 log.debug('IP %s is network %s' %
2001 2002 (source_ip_address, network_address))
2002 2003 return True
2003 2004             # in any case where we cannot determine the IP, don't crash, just
2004 2005             # skip it and log an error; we still want to respond forbidden when
2005 2006             # a bad IP is sent
2006 2007 except Exception:
2007 2008 log.error(traceback.format_exc())
2008 2009 continue
2009 2010 return False
2010 2011
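
A brief illustration of check_ip_access with made-up addresses:

    # hedged illustration, made-up addresses
    check_ip_access('192.168.1.7', ['192.168.1.0/24'])   # True, inside the subnet
    check_ip_access('10.0.0.5', ['192.168.1.0/24'])      # False, no matching network
    check_ip_access('10.0.0.5', None)                    # False, no allowed list given
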
2011 2012
2012 2013 def get_cython_compat_decorator(wrapper, func):
2013 2014 """
2014 2015 Creates a cython compatible decorator. The previously used
2015 2016 decorator.decorator() function seems to be incompatible with cython.
2016 2017
2017 2018 :param wrapper: __wrapper method of the decorator class
2018 2019 :param func: decorated function
2019 2020 """
2020 2021 @wraps(func)
2021 2022 def local_wrapper(*args, **kwds):
2022 2023 return wrapper(func, *args, **kwds)
2023 2024 local_wrapper.__wrapped__ = func
2024 2025 return local_wrapper
2025 2026
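
A sketch of how a decorator class might delegate to get_cython_compat_decorator; the class below is a simplified stand-in, not the actual PermsDecorator implementation:

    # hedged sketch: a simplified decorator class using the helper above
    class ExampleCheckDecorator(object):

        def __call__(self, func):
            return get_cython_compat_decorator(self.__wrapper, func)

        def __wrapper(self, func, *fargs, **fkwargs):
            # perform checks here, then call through to the wrapped function
            return func(*fargs, **fkwargs)
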
2026 2027
@@ -1,631 +1,632 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 The base Controller API
23 23 Provides the BaseController class for subclassing. And usage in different
24 24 controllers
25 25 """
26 26
27 27 import logging
28 28 import socket
29 29
30 30 import ipaddress
31 31 import pyramid.threadlocal
32 32
33 33 from paste.auth.basic import AuthBasicAuthenticator
34 34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
35 35 from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
36 36 from pylons import config, tmpl_context as c, request, url
37 37 from pylons.controllers import WSGIController
38 38 from pylons.controllers.util import redirect
39 39 from pylons.i18n import translation
40 40 # marcink: don't remove this import
41 41 from pylons.templating import render_mako as render # noqa
42 42 from pylons.i18n.translation import _
43 43 from webob.exc import HTTPFound
44 44
45 45
46 46 import rhodecode
47 47 from rhodecode.authentication.base import VCS_TYPE
48 48 from rhodecode.lib import auth, utils2
49 49 from rhodecode.lib import helpers as h
50 50 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
51 51 from rhodecode.lib.exceptions import UserCreationError
52 52 from rhodecode.lib.utils import (
53 53 get_repo_slug, set_rhodecode_config, password_changed,
54 54 get_enabled_hook_classes)
55 55 from rhodecode.lib.utils2 import (
56 56 str2bool, safe_unicode, AttributeDict, safe_int, md5, aslist)
57 57 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
58 58 from rhodecode.model import meta
59 59 from rhodecode.model.db import Repository, User, ChangesetComment
60 60 from rhodecode.model.notification import NotificationModel
61 61 from rhodecode.model.scm import ScmModel
62 62 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
63 63
64 64
65 65 log = logging.getLogger(__name__)
66 66
67 67
68 68 def _filter_proxy(ip):
69 69 """
70 70     Passed in IP addresses in HEADERS can be in a special format of multiple
71 71     ips. Those comma separated IPs are passed from various proxies in the
72 72     chain of request processing, the left-most being the original client.
73 73     We only care about the first IP, which came from the original client.
74 74
75 75 :param ip: ip string from headers
76 76 """
77 77 if ',' in ip:
78 78 _ips = ip.split(',')
79 79 _first_ip = _ips[0].strip()
80 80 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
81 81 return _first_ip
82 82 return ip
83 83
84 84
85 85 def _filter_port(ip):
86 86 """
87 87     Removes a port from the ip; there are 4 main cases to handle here:
88 88 - ipv4 eg. 127.0.0.1
89 89 - ipv6 eg. ::1
90 90 - ipv4+port eg. 127.0.0.1:8080
91 91 - ipv6+port eg. [::1]:8080
92 92
93 93 :param ip:
94 94 """
95 95 def is_ipv6(ip_addr):
96 96 if hasattr(socket, 'inet_pton'):
97 97 try:
98 98 socket.inet_pton(socket.AF_INET6, ip_addr)
99 99 except socket.error:
100 100 return False
101 101 else:
102 102 # fallback to ipaddress
103 103 try:
104 ipaddress.IPv6Address(ip_addr)
104 ipaddress.IPv6Address(safe_unicode(ip_addr))
105 105 except Exception:
106 106 return False
107 107 return True
108 108
109 109 if ':' not in ip: # must be ipv4 pure ip
110 110 return ip
111 111
112 112 if '[' in ip and ']' in ip: # ipv6 with port
113 113 return ip.split(']')[0][1:].lower()
114 114
115 115 # must be ipv6 or ipv4 with port
116 116 if is_ipv6(ip):
117 117 return ip
118 118 else:
119 119 ip, _port = ip.split(':')[:2] # means ipv4+port
120 120 return ip
121 121
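
A few illustrative calls for the two helpers above, with made-up addresses:

    # hedged illustration of the helpers above
    _filter_proxy('203.0.113.7, 10.0.0.1')  # '203.0.113.7' (left-most, original client)
    _filter_port('127.0.0.1:8080')          # '127.0.0.1'
    _filter_port('[::1]:8080')              # '::1'
    _filter_port('::1')                     # '::1' (bare ipv6 stays untouched)
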
122 122
123 123 def get_ip_addr(environ):
124 124 proxy_key = 'HTTP_X_REAL_IP'
125 125 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
126 126 def_key = 'REMOTE_ADDR'
127 127 _filters = lambda x: _filter_port(_filter_proxy(x))
128 128
129 129 ip = environ.get(proxy_key)
130 130 if ip:
131 131 return _filters(ip)
132 132
133 133 ip = environ.get(proxy_key2)
134 134 if ip:
135 135 return _filters(ip)
136 136
137 137 ip = environ.get(def_key, '0.0.0.0')
138 138 return _filters(ip)
139 139
140 140
141 141 def get_server_ip_addr(environ, log_errors=True):
142 142 hostname = environ.get('SERVER_NAME')
143 143 try:
144 144 return socket.gethostbyname(hostname)
145 145 except Exception as e:
146 146 if log_errors:
147 147 # in some cases this lookup is not possible, and we don't want to
148 148 # make it an exception in logs
149 149 log.exception('Could not retrieve server ip address: %s', e)
150 150 return hostname
151 151
152 152
153 153 def get_server_port(environ):
154 154 return environ.get('SERVER_PORT')
155 155
156 156
157 157 def get_access_path(environ):
158 158 path = environ.get('PATH_INFO')
159 159 org_req = environ.get('pylons.original_request')
160 160 if org_req:
161 161 path = org_req.environ.get('PATH_INFO')
162 162 return path
163 163
164 164
165 165 def get_user_agent(environ):
166 166 return environ.get('HTTP_USER_AGENT')
167 167
168 168
169 169 def vcs_operation_context(
170 170 environ, repo_name, username, action, scm, check_locking=True,
171 171 is_shadow_repo=False):
172 172 """
173 173 Generate the context for a vcs operation, e.g. push or pull.
174 174
175 175 This context is passed over the layers so that hooks triggered by the
176 176 vcs operation know details like the user, the user's IP address etc.
177 177
178 178     :param check_locking: Allows switching off the computation of the locking
179 179         data. This serves mainly the need of the simplevcs middleware to be
180 180         able to disable this for certain operations.
181 181
182 182 """
183 183 # Tri-state value: False: unlock, None: nothing, True: lock
184 184 make_lock = None
185 185 locked_by = [None, None, None]
186 186 is_anonymous = username == User.DEFAULT_USER
187 187 if not is_anonymous and check_locking:
188 188 log.debug('Checking locking on repository "%s"', repo_name)
189 189 user = User.get_by_username(username)
190 190 repo = Repository.get_by_repo_name(repo_name)
191 191 make_lock, __, locked_by = repo.get_locking_state(
192 192 action, user.user_id)
193 193
194 194 settings_model = VcsSettingsModel(repo=repo_name)
195 195 ui_settings = settings_model.get_ui_settings()
196 196
197 197 extras = {
198 198 'ip': get_ip_addr(environ),
199 199 'username': username,
200 200 'action': action,
201 201 'repository': repo_name,
202 202 'scm': scm,
203 203 'config': rhodecode.CONFIG['__file__'],
204 204 'make_lock': make_lock,
205 205 'locked_by': locked_by,
206 206 'server_url': utils2.get_server_url(environ),
207 207 'user_agent': get_user_agent(environ),
208 208 'hooks': get_enabled_hook_classes(ui_settings),
209 209 'is_shadow_repo': is_shadow_repo,
210 210 }
211 211 return extras
212 212
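
A sketch of building the hook context for a push; the repository and user names are assumed examples:

    # hedged sketch: hook context for a push; names are assumed examples
    extras = vcs_operation_context(
        environ, repo_name='group/some-repo', username='developer',
        action='push', scm='git', check_locking=False)
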
213 213
214 214 class BasicAuth(AuthBasicAuthenticator):
215 215
216 216 def __init__(self, realm, authfunc, registry, auth_http_code=None,
217 217 initial_call_detection=False, acl_repo_name=None):
218 218 self.realm = realm
219 219 self.initial_call = initial_call_detection
220 220 self.authfunc = authfunc
221 221 self.registry = registry
222 222 self.acl_repo_name = acl_repo_name
223 223 self._rc_auth_http_code = auth_http_code
224 224
225 225 def _get_response_from_code(self, http_code):
226 226 try:
227 227 return get_exception(safe_int(http_code))
228 228 except Exception:
229 229 log.exception('Failed to fetch response for code %s' % http_code)
230 230 return HTTPForbidden
231 231
232 232 def build_authentication(self):
233 233 head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
234 234 if self._rc_auth_http_code and not self.initial_call:
235 235 # return alternative HTTP code if alternative http return code
236 236 # is specified in RhodeCode config, but ONLY if it's not the
237 237 # FIRST call
238 238 custom_response_klass = self._get_response_from_code(
239 239 self._rc_auth_http_code)
240 240 return custom_response_klass(headers=head)
241 241 return HTTPUnauthorized(headers=head)
242 242
243 243 def authenticate(self, environ):
244 244 authorization = AUTHORIZATION(environ)
245 245 if not authorization:
246 246 return self.build_authentication()
247 247 (authmeth, auth) = authorization.split(' ', 1)
248 248 if 'basic' != authmeth.lower():
249 249 return self.build_authentication()
250 250 auth = auth.strip().decode('base64')
251 251 _parts = auth.split(':', 1)
252 252 if len(_parts) == 2:
253 253 username, password = _parts
254 254 if self.authfunc(
255 255 username, password, environ, VCS_TYPE,
256 256 registry=self.registry, acl_repo_name=self.acl_repo_name):
257 257 return username
258 258 if username and password:
259 259 # we mark that we actually executed authentication once, at
260 260 # that point we can use the alternative auth code
261 261 self.initial_call = False
262 262
263 263 return self.build_authentication()
264 264
265 265 __call__ = authenticate
266 266
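
For reference, authenticate() expects a standard Basic authorization header; 'dXNlcjpwYXNz' below is the base64 form of 'user:pass':

    # hedged illustration of the header shape authenticate() consumes
    environ = {'HTTP_AUTHORIZATION': 'Basic dXNlcjpwYXNz'}  # base64('user:pass')
    # the method splits off the 'basic' scheme, base64-decodes the payload and
    # splits it once on ':' into username and password before calling authfunc
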
267 267
268 268 def calculate_version_hash():
269 269 return md5(
270 270 config.get('beaker.session.secret', '') +
271 271 rhodecode.__version__)[:8]
272 272
273 273
274 274 def get_current_lang(request):
275 275 # NOTE(marcink): remove after pyramid move
276 276 try:
277 277 return translation.get_lang()[0]
278 278 except:
279 279 pass
280 280
281 281 return getattr(request, '_LOCALE_', None)
282 282
283 283
284 284 def attach_context_attributes(context, request, user_id):
285 285 """
286 286     Attach variables to the template context called `c`. Please note that
287 287     the request could be a pylons or a pyramid request here.
288 288 """
289
289 290 rc_config = SettingsModel().get_all_settings(cache=True)
290 291
291 292 context.rhodecode_version = rhodecode.__version__
292 293 context.rhodecode_edition = config.get('rhodecode.edition')
293 294 # unique secret + version does not leak the version but keep consistency
294 295 context.rhodecode_version_hash = calculate_version_hash()
295 296
296 297 # Default language set for the incoming request
297 298 context.language = get_current_lang(request)
298 299
299 300 # Visual options
300 301 context.visual = AttributeDict({})
301 302
302 303 # DB stored Visual Items
303 304 context.visual.show_public_icon = str2bool(
304 305 rc_config.get('rhodecode_show_public_icon'))
305 306 context.visual.show_private_icon = str2bool(
306 307 rc_config.get('rhodecode_show_private_icon'))
307 308 context.visual.stylify_metatags = str2bool(
308 309 rc_config.get('rhodecode_stylify_metatags'))
309 310 context.visual.dashboard_items = safe_int(
310 311 rc_config.get('rhodecode_dashboard_items', 100))
311 312 context.visual.admin_grid_items = safe_int(
312 313 rc_config.get('rhodecode_admin_grid_items', 100))
313 314 context.visual.repository_fields = str2bool(
314 315 rc_config.get('rhodecode_repository_fields'))
315 316 context.visual.show_version = str2bool(
316 317 rc_config.get('rhodecode_show_version'))
317 318 context.visual.use_gravatar = str2bool(
318 319 rc_config.get('rhodecode_use_gravatar'))
319 320 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
320 321 context.visual.default_renderer = rc_config.get(
321 322 'rhodecode_markup_renderer', 'rst')
322 323 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
323 324 context.visual.rhodecode_support_url = \
324 325 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
325 326
326 327 context.pre_code = rc_config.get('rhodecode_pre_code')
327 328 context.post_code = rc_config.get('rhodecode_post_code')
328 329 context.rhodecode_name = rc_config.get('rhodecode_title')
329 330 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
330 331     # if we have specified default_encoding in the request, it takes
331 332     # priority
332 333 if request.GET.get('default_encoding'):
333 334 context.default_encodings.insert(0, request.GET.get('default_encoding'))
334 335 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
335 336
336 337 # INI stored
337 338 context.labs_active = str2bool(
338 339 config.get('labs_settings_active', 'false'))
339 340 context.visual.allow_repo_location_change = str2bool(
340 341 config.get('allow_repo_location_change', True))
341 342 context.visual.allow_custom_hooks_settings = str2bool(
342 343 config.get('allow_custom_hooks_settings', True))
343 344 context.debug_style = str2bool(config.get('debug_style', False))
344 345
345 346 context.rhodecode_instanceid = config.get('instance_id')
346 347
347 348 context.visual.cut_off_limit_diff = safe_int(
348 349 config.get('cut_off_limit_diff'))
349 350 context.visual.cut_off_limit_file = safe_int(
350 351 config.get('cut_off_limit_file'))
351 352
352 353 # AppEnlight
353 354 context.appenlight_enabled = str2bool(config.get('appenlight', 'false'))
354 355 context.appenlight_api_public_key = config.get(
355 356 'appenlight.api_public_key', '')
356 357 context.appenlight_server_url = config.get('appenlight.server_url', '')
357 358
358 359 # JS template context
359 360 context.template_context = {
360 361 'repo_name': None,
361 362 'repo_type': None,
362 363 'repo_landing_commit': None,
363 364 'rhodecode_user': {
364 365 'username': None,
365 366 'email': None,
366 367 'notification_status': False
367 368 },
368 369 'visual': {
369 370 'default_renderer': None
370 371 },
371 372 'commit_data': {
372 373 'commit_id': None
373 374 },
374 375 'pull_request_data': {'pull_request_id': None},
375 376 'timeago': {
376 377 'refresh_time': 120 * 1000,
377 378 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
378 379 },
379 380 'pylons_dispatch': {
380 381 # 'controller': request.environ['pylons.routes_dict']['controller'],
381 382 # 'action': request.environ['pylons.routes_dict']['action'],
382 383 },
383 384 'pyramid_dispatch': {
384 385
385 386 },
386 387 'extra': {'plugins': {}}
387 388 }
388 389 # END CONFIG VARS
389 390
390 391     # TODO: This doesn't work when called from pylons compatibility tween.
391 392 # Fix this and remove it from base controller.
392 393 # context.repo_name = get_repo_slug(request) # can be empty
393 394
394 395 diffmode = 'sideside'
395 396 if request.GET.get('diffmode'):
396 397 if request.GET['diffmode'] == 'unified':
397 398 diffmode = 'unified'
398 399 elif request.session.get('diffmode'):
399 400 diffmode = request.session['diffmode']
400 401
401 402 context.diffmode = diffmode
402 403
403 404 if request.session.get('diffmode') != diffmode:
404 405 request.session['diffmode'] = diffmode
405 406
406 407 context.csrf_token = auth.get_csrf_token(session=request.session)
407 408 context.backends = rhodecode.BACKENDS.keys()
408 409 context.backends.sort()
409 410 context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id)
410 411
411 412 # NOTE(marcink): when migrated to pyramid we don't need to set this anymore,
412 413 # given request will ALWAYS be pyramid one
413 414 pyramid_request = pyramid.threadlocal.get_current_request()
414 415 context.pyramid_request = pyramid_request
415 416
416 417 # web case
417 418 if hasattr(pyramid_request, 'user'):
418 419 context.auth_user = pyramid_request.user
419 420 context.rhodecode_user = pyramid_request.user
420 421
421 422 # api case
422 423 if hasattr(pyramid_request, 'rpc_user'):
423 424 context.auth_user = pyramid_request.rpc_user
424 425 context.rhodecode_user = pyramid_request.rpc_user
425 426
426 427 # attach the whole call context to the request
427 428 request.call_context = context
428 429
429 430
430 431 def get_auth_user(request):
431 432 environ = request.environ
432 433 session = request.session
433 434
434 435 ip_addr = get_ip_addr(environ)
435 436 # make sure that we update permissions each time we call controller
436 437 _auth_token = (request.GET.get('auth_token', '') or
437 438 request.GET.get('api_key', ''))
438 439
439 440 if _auth_token:
440 441 # when using API_KEY we assume user exists, and
441 442 # doesn't need auth based on cookies.
442 443 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
443 444 authenticated = False
444 445 else:
445 446 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
446 447 try:
447 448 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
448 449 ip_addr=ip_addr)
449 450 except UserCreationError as e:
450 451 h.flash(e, 'error')
451 452 # container auth or other auth functions that create users
452 453             # on the fly can throw this exception signaling that there's an
453 454             # issue with user creation; an explanation should be provided
454 455             # in the Exception itself. We then create a simple blank
455 456 # AuthUser
456 457 auth_user = AuthUser(ip_addr=ip_addr)
457 458
458 459 if password_changed(auth_user, session):
459 460 session.invalidate()
460 461 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
461 462 auth_user = AuthUser(ip_addr=ip_addr)
462 463
463 464 authenticated = cookie_store.get('is_authenticated')
464 465
465 466 if not auth_user.is_authenticated and auth_user.is_user_object:
466 467 # user is not authenticated and not empty
467 468 auth_user.set_authenticated(authenticated)
468 469
469 470 return auth_user
470 471
471 472
472 473 class BaseController(WSGIController):
473 474
474 475 def __before__(self):
475 476 """
476 477 __before__ is called before controller methods and after __call__
477 478 """
478 479 # on each call propagate settings calls into global settings.
479 480 set_rhodecode_config(config)
480 481 attach_context_attributes(c, request, self._rhodecode_user.user_id)
481 482
482 483 # TODO: Remove this when fixed in attach_context_attributes()
483 484 c.repo_name = get_repo_slug(request) # can be empty
484 485
485 486 self.cut_off_limit_diff = safe_int(config.get('cut_off_limit_diff'))
486 487 self.cut_off_limit_file = safe_int(config.get('cut_off_limit_file'))
487 488 self.sa = meta.Session
488 489 self.scm_model = ScmModel(self.sa)
489 490
490 491 # set user language
491 492 user_lang = getattr(c.pyramid_request, '_LOCALE_', None)
492 493 if user_lang:
493 494 translation.set_lang(user_lang)
494 495 log.debug('set language to %s for user %s',
495 496 user_lang, self._rhodecode_user)
496 497
497 498 def _dispatch_redirect(self, with_url, environ, start_response):
498 499 resp = HTTPFound(with_url)
499 500 environ['SCRIPT_NAME'] = '' # handle prefix middleware
500 501 environ['PATH_INFO'] = with_url
501 502 return resp(environ, start_response)
502 503
503 504 def __call__(self, environ, start_response):
504 505 """Invoke the Controller"""
505 506 # WSGIController.__call__ dispatches to the Controller method
506 507 # the request is routed to. This routing information is
507 508 # available in environ['pylons.routes_dict']
508 509 from rhodecode.lib import helpers as h
509 510
510 511 # Provide the Pylons context to Pyramid's debugtoolbar if it asks
511 512 if environ.get('debugtoolbar.wants_pylons_context', False):
512 513 environ['debugtoolbar.pylons_context'] = c._current_obj()
513 514
514 515 _route_name = '.'.join([environ['pylons.routes_dict']['controller'],
515 516 environ['pylons.routes_dict']['action']])
516 517
517 518 self.rc_config = SettingsModel().get_all_settings(cache=True)
518 519 self.ip_addr = get_ip_addr(environ)
519 520
520 521 # The rhodecode auth user is looked up and passed through the
521 522 # environ by the pylons compatibility tween in pyramid.
522 523 # So we can just grab it from there.
523 524 auth_user = environ['rc_auth_user']
524 525
525 526 # set globals for auth user
526 527 request.user = auth_user
527 528 self._rhodecode_user = auth_user
528 529
529 530 log.info('IP: %s User: %s accessed %s [%s]' % (
530 531 self.ip_addr, auth_user, safe_unicode(get_access_path(environ)),
531 532 _route_name)
532 533 )
533 534
534 535 user_obj = auth_user.get_instance()
535 536 if user_obj and user_obj.user_data.get('force_password_change'):
536 537 h.flash('You are required to change your password', 'warning',
537 538 ignore_duplicate=True)
538 539 return self._dispatch_redirect(
539 540 url('my_account_password'), environ, start_response)
540 541
541 542 return WSGIController.__call__(self, environ, start_response)
542 543
543 544
544 545 class BaseRepoController(BaseController):
545 546 """
546 547     Base class for controllers responsible for loading all needed data for
547 548     a repository. Loaded items are:
548 549
549 550 c.rhodecode_repo: instance of scm repository
550 551 c.rhodecode_db_repo: instance of db
551 552 c.repository_requirements_missing: shows that repository specific data
552 553 could not be displayed due to the missing requirements
553 554     c.repository_pull_requests: shows the number of open pull requests
554 555 """
555 556
556 557 def __before__(self):
557 558 super(BaseRepoController, self).__before__()
558 559 if c.repo_name: # extracted from routes
559 560 db_repo = Repository.get_by_repo_name(c.repo_name)
560 561 if not db_repo:
561 562 return
562 563
563 564 log.debug(
564 565 'Found repository in database %s with state `%s`',
565 566 safe_unicode(db_repo), safe_unicode(db_repo.repo_state))
566 567 route = getattr(request.environ.get('routes.route'), 'name', '')
567 568
568 569             # allow deleting repos that are somehow damaged in the filesystem
569 570 if route in ['delete_repo']:
570 571 return
571 572
572 573 if db_repo.repo_state in [Repository.STATE_PENDING]:
573 574 if route in ['repo_creating_home']:
574 575 return
575 576 check_url = url('repo_creating_home', repo_name=c.repo_name)
576 577 return redirect(check_url)
577 578
578 579 self.rhodecode_db_repo = db_repo
579 580
580 581 missing_requirements = False
581 582 try:
582 583 self.rhodecode_repo = self.rhodecode_db_repo.scm_instance()
583 584 except RepositoryRequirementError as e:
584 585 missing_requirements = True
585 586 self._handle_missing_requirements(e)
586 587
587 588 if self.rhodecode_repo is None and not missing_requirements:
588 589 log.error('%s this repository is present in database but it '
589 590 'cannot be created as an scm instance', c.repo_name)
590 591
591 592 h.flash(_(
592 593 "The repository at %(repo_name)s cannot be located.") %
593 594 {'repo_name': c.repo_name},
594 595 category='error', ignore_duplicate=True)
595 596 redirect(h.route_path('home'))
596 597
597 598 # update last change according to VCS data
598 599 if not missing_requirements:
599 600 commit = db_repo.get_commit(
600 601 pre_load=["author", "date", "message", "parents"])
601 602 db_repo.update_commit_cache(commit)
602 603
603 604 # Prepare context
604 605 c.rhodecode_db_repo = db_repo
605 606 c.rhodecode_repo = self.rhodecode_repo
606 607 c.repository_requirements_missing = missing_requirements
607 608
608 609 self._update_global_counters(self.scm_model, db_repo)
609 610
610 611 def _update_global_counters(self, scm_model, db_repo):
611 612 """
612 613         Base variables that are exposed to every page of a repository
613 614 """
614 615 c.repository_pull_requests = scm_model.get_pull_requests(db_repo)
615 616
616 617 def _handle_missing_requirements(self, error):
617 618 self.rhodecode_repo = None
618 619 log.error(
619 620 'Requirements are missing for repository %s: %s',
620 621 c.repo_name, error.message)
621 622
622 623 summary_url = h.route_path('repo_summary', repo_name=c.repo_name)
623 624 statistics_url = url('edit_repo_statistics', repo_name=c.repo_name)
624 625 settings_update_url = url('repo', repo_name=c.repo_name)
625 626 path = request.path
626 627 should_redirect = (
627 628 path not in (summary_url, settings_update_url)
628 629 and '/settings' not in path or path == statistics_url
629 630 )
630 631 if should_redirect:
631 632 redirect(summary_url)
1 NO CONTENT: modified file
@@ -1,907 +1,908 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 users model for RhodeCode
23 23 """
24 24
25 25 import logging
26 26 import traceback
27 27
28 28 import datetime
29 29 from pylons.i18n.translation import _
30 30
31 31 import ipaddress
32 32 from sqlalchemy.exc import DatabaseError
33 33
34 34 from rhodecode import events
35 35 from rhodecode.lib.user_log_filter import user_log_filter
36 36 from rhodecode.lib.utils2 import (
37 37 safe_unicode, get_current_rhodecode_user, action_logger_generic,
38 38 AttributeDict, str2bool)
39 39 from rhodecode.lib.exceptions import (
40 40 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
41 41 UserOwnsUserGroupsException, NotAllowedToCreateUserError)
42 42 from rhodecode.lib.caching_query import FromCache
43 43 from rhodecode.model import BaseModel
44 44 from rhodecode.model.auth_token import AuthTokenModel
45 45 from rhodecode.model.db import (
46 46 _hash_key, true, false, or_, joinedload, User, UserToPerm,
47 47 UserEmailMap, UserIpMap, UserLog)
48 48 from rhodecode.model.meta import Session
49 49 from rhodecode.model.repo_group import RepoGroupModel
50 50
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 class UserModel(BaseModel):
56 56 cls = User
57 57
58 58 def get(self, user_id, cache=False):
59 59 user = self.sa.query(User)
60 60 if cache:
61 61 user = user.options(
62 62 FromCache("sql_cache_short", "get_user_%s" % user_id))
63 63 return user.get(user_id)
64 64
65 65 def get_user(self, user):
66 66 return self._get_user(user)
67 67
68 68 def _serialize_user(self, user):
69 69 import rhodecode.lib.helpers as h
70 70
71 71 return {
72 72 'id': user.user_id,
73 73 'first_name': user.first_name,
74 74 'last_name': user.last_name,
75 75 'username': user.username,
76 76 'email': user.email,
77 77 'icon_link': h.gravatar_url(user.email, 30),
78 78 'value_display': h.escape(h.person(user)),
79 79 'value': user.username,
80 80 'value_type': 'user',
81 81 'active': user.active,
82 82 }
83 83
84 84 def get_users(self, name_contains=None, limit=20, only_active=True):
85 85
86 86 query = self.sa.query(User)
87 87 if only_active:
88 88 query = query.filter(User.active == true())
89 89
90 90 if name_contains:
91 91 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
92 92 query = query.filter(
93 93 or_(
94 94 User.name.ilike(ilike_expression),
95 95 User.lastname.ilike(ilike_expression),
96 96 User.username.ilike(ilike_expression)
97 97 )
98 98 )
99 99 query = query.limit(limit)
100 100 users = query.all()
101 101
102 102 _users = [
103 103 self._serialize_user(user) for user in users
104 104 ]
105 105 return _users
106 106
107 107 def get_by_username(self, username, cache=False, case_insensitive=False):
108 108
109 109 if case_insensitive:
110 110 user = self.sa.query(User).filter(User.username.ilike(username))
111 111 else:
112 112 user = self.sa.query(User)\
113 113 .filter(User.username == username)
114 114 if cache:
115 115 name_key = _hash_key(username)
116 116 user = user.options(
117 117 FromCache("sql_cache_short", "get_user_%s" % name_key))
118 118 return user.scalar()
119 119
120 120 def get_by_email(self, email, cache=False, case_insensitive=False):
121 121 return User.get_by_email(email, case_insensitive, cache)
122 122
123 123 def get_by_auth_token(self, auth_token, cache=False):
124 124 return User.get_by_auth_token(auth_token, cache)
125 125
126 126 def get_active_user_count(self, cache=False):
127 127 return User.query().filter(
128 128 User.active == True).filter(
129 129 User.username != User.DEFAULT_USER).count()
130 130
131 131 def create(self, form_data, cur_user=None):
132 132 if not cur_user:
133 133 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
134 134
135 135 user_data = {
136 136 'username': form_data['username'],
137 137 'password': form_data['password'],
138 138 'email': form_data['email'],
139 139 'firstname': form_data['firstname'],
140 140 'lastname': form_data['lastname'],
141 141 'active': form_data['active'],
142 142 'extern_type': form_data['extern_type'],
143 143 'extern_name': form_data['extern_name'],
144 144 'admin': False,
145 145 'cur_user': cur_user
146 146 }
147 147
148 148 if 'create_repo_group' in form_data:
149 149 user_data['create_repo_group'] = str2bool(
150 150 form_data.get('create_repo_group'))
151 151
152 152 try:
153 153 if form_data.get('password_change'):
154 154 user_data['force_password_change'] = True
155 155 return UserModel().create_or_update(**user_data)
156 156 except Exception:
157 157 log.error(traceback.format_exc())
158 158 raise
159 159
160 160 def update_user(self, user, skip_attrs=None, **kwargs):
161 161 from rhodecode.lib.auth import get_crypt_password
162 162
163 163 user = self._get_user(user)
164 164 if user.username == User.DEFAULT_USER:
165 165 raise DefaultUserException(
166 166 _("You can't Edit this user since it's"
167 167 " crucial for entire application"))
168 168
169 169 # first store only defaults
170 170 user_attrs = {
171 171 'updating_user_id': user.user_id,
172 172 'username': user.username,
173 173 'password': user.password,
174 174 'email': user.email,
175 175 'firstname': user.name,
176 176 'lastname': user.lastname,
177 177 'active': user.active,
178 178 'admin': user.admin,
179 179 'extern_name': user.extern_name,
180 180 'extern_type': user.extern_type,
181 181 'language': user.user_data.get('language')
182 182 }
183 183
184 184 # in case there's new_password, that comes from form, use it to
185 185 # store password
186 186 if kwargs.get('new_password'):
187 187 kwargs['password'] = kwargs['new_password']
188 188
189 189 # cleanups, my_account password change form
190 190 kwargs.pop('current_password', None)
191 191 kwargs.pop('new_password', None)
192 192
193 193 # cleanups, user edit password change form
194 194 kwargs.pop('password_confirmation', None)
195 195 kwargs.pop('password_change', None)
196 196
197 197 # create repo group on user creation
198 198 kwargs.pop('create_repo_group', None)
199 199
200 200 # legacy forms send name, which is the firstname
201 201 firstname = kwargs.pop('name', None)
202 202 if firstname:
203 203 kwargs['firstname'] = firstname
204 204
205 205 for k, v in kwargs.items():
206 206 # skip if we don't want to update this
207 207 if skip_attrs and k in skip_attrs:
208 208 continue
209 209
210 210 user_attrs[k] = v
211 211
212 212 try:
213 213 return self.create_or_update(**user_attrs)
214 214 except Exception:
215 215 log.error(traceback.format_exc())
216 216 raise
217 217
218 218 def create_or_update(
219 219 self, username, password, email, firstname='', lastname='',
220 220 active=True, admin=False, extern_type=None, extern_name=None,
221 221 cur_user=None, plugin=None, force_password_change=False,
222 222 allow_to_create_user=True, create_repo_group=None,
223 223 updating_user_id=None, language=None, strict_creation_check=True):
224 224 """
225 225 Creates a new instance if not found, or updates current one
226 226
227 227 :param username:
228 228 :param password:
229 229 :param email:
230 230 :param firstname:
231 231 :param lastname:
232 232 :param active:
233 233 :param admin:
234 234 :param extern_type:
235 235 :param extern_name:
236 236 :param cur_user:
237 237 :param plugin: optional plugin this method was called from
238 238 :param force_password_change: toggles new or existing user flag
239 239 for password change
240 240 :param allow_to_create_user: Defines if the method can actually create
241 241 new users
242 242         :param create_repo_group: Defines if the method should also
243 243             create a repo group with the user name, and set the user as owner
244 244         :param updating_user_id: if set, this is the user we want to
245 245             update; this allows editing the username.
246 246 :param language: language of user from interface.
247 247
248 248 :returns: new User object with injected `is_new_user` attribute.
249 249 """
250 250 if not cur_user:
251 251 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
252 252
253 253 from rhodecode.lib.auth import (
254 254 get_crypt_password, check_password, generate_auth_token)
255 255 from rhodecode.lib.hooks_base import (
256 256 log_create_user, check_allowed_create_user)
257 257
258 258 def _password_change(new_user, password):
259 259 # empty password
260 260 if not new_user.password:
261 261 return False
262 262
263 263 # password check is only needed for RhodeCode internal auth calls
264 264 # in case it's a plugin we don't care
265 265 if not plugin:
266 266
267 267 # first check if we gave crypted password back, and if it
268 268 # matches it's not password change
269 269 if new_user.password == password:
270 270 return False
271 271
272 272 password_match = check_password(password, new_user.password)
273 273 if not password_match:
274 274 return True
275 275
276 276 return False
277 277
278 278 # read settings on default personal repo group creation
279 279 if create_repo_group is None:
280 280 default_create_repo_group = RepoGroupModel()\
281 281 .get_default_create_personal_repo_group()
282 282 create_repo_group = default_create_repo_group
283 283
284 284 user_data = {
285 285 'username': username,
286 286 'password': password,
287 287 'email': email,
288 288 'firstname': firstname,
289 289 'lastname': lastname,
290 290 'active': active,
291 291 'admin': admin
292 292 }
293 293
294 294 if updating_user_id:
295 295 log.debug('Checking for existing account in RhodeCode '
296 296 'database with user_id `%s` ' % (updating_user_id,))
297 297 user = User.get(updating_user_id)
298 298 else:
299 299 log.debug('Checking for existing account in RhodeCode '
300 300 'database with username `%s` ' % (username,))
301 301 user = User.get_by_username(username, case_insensitive=True)
302 302
303 303 if user is None:
304 304 # we check internal flag if this method is actually allowed to
305 305 # create new user
306 306 if not allow_to_create_user:
307 307 msg = ('Method wants to create new user, but it is not '
308 308 'allowed to do so')
309 309 log.warning(msg)
310 310 raise NotAllowedToCreateUserError(msg)
311 311
312 312 log.debug('Creating new user %s', username)
313 313
314 314 # only if we create user that is active
315 315 new_active_user = active
316 316 if new_active_user and strict_creation_check:
317 317 # raises UserCreationError if it's not allowed for any reason to
318 318 # create new active user, this also executes pre-create hooks
319 319 check_allowed_create_user(user_data, cur_user, strict_check=True)
320 320 events.trigger(events.UserPreCreate(user_data))
321 321 new_user = User()
322 322 edit = False
323 323 else:
324 324 log.debug('updating user %s', username)
325 325 events.trigger(events.UserPreUpdate(user, user_data))
326 326 new_user = user
327 327 edit = True
328 328
329 329 # we're not allowed to edit default user
330 330 if user.username == User.DEFAULT_USER:
331 331 raise DefaultUserException(
332 332 _("You can't edit this user (`%(username)s`) since it's "
333 333 "crucial for entire application") % {'username': user.username})
334 334
335 335 # inject special attribute that will tell us if User is new or old
336 336 new_user.is_new_user = not edit
337 337         # for users that didn't specify an auth type, we use the RhodeCode built-in
338 338 from rhodecode.authentication.plugins import auth_rhodecode
339 339 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.name
340 340 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.name
341 341
342 342 try:
343 343 new_user.username = username
344 344 new_user.admin = admin
345 345 new_user.email = email
346 346 new_user.active = active
347 347 new_user.extern_name = safe_unicode(extern_name)
348 348 new_user.extern_type = safe_unicode(extern_type)
349 349 new_user.name = firstname
350 350 new_user.lastname = lastname
351 351
352 352 # set password only if creating an user or password is changed
353 353 if not edit or _password_change(new_user, password):
354 354 reason = 'new password' if edit else 'new user'
355 355 log.debug('Updating password reason=>%s', reason)
356 356 new_user.password = get_crypt_password(password) if password else None
357 357
358 358 if force_password_change:
359 359 new_user.update_userdata(force_password_change=True)
360 360 if language:
361 361 new_user.update_userdata(language=language)
362 362 new_user.update_userdata(notification_status=True)
363 363
364 364 self.sa.add(new_user)
365 365
366 366 if not edit and create_repo_group:
367 367 RepoGroupModel().create_personal_repo_group(
368 368 new_user, commit_early=False)
369 369
370 370 if not edit:
371 371 # add the RSS token
372 372 AuthTokenModel().create(username,
373 373 description='Generated feed token',
374 374 role=AuthTokenModel.cls.ROLE_FEED)
375 375 log_create_user(created_by=cur_user, **new_user.get_dict())
376 376 events.trigger(events.UserPostCreate(user_data))
377 377 return new_user
378 378 except (DatabaseError,):
379 379 log.error(traceback.format_exc())
380 380 raise
381 381
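
A sketch of calling create_or_update directly from application code; all of the values below are assumed examples:

    # hedged sketch: creating (or updating) a user via the model; values are examples
    new_user = UserModel().create_or_update(
        username='jdoe', password='secret', email='jdoe@example.com',
        firstname='John', lastname='Doe', active=True)
    # new_user.is_new_user tells whether a fresh account was created
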
382 382 def create_registration(self, form_data):
383 383 from rhodecode.model.notification import NotificationModel
384 384 from rhodecode.model.notification import EmailNotificationModel
385 385
386 386 try:
387 387 form_data['admin'] = False
388 388 form_data['extern_name'] = 'rhodecode'
389 389 form_data['extern_type'] = 'rhodecode'
390 390 new_user = self.create(form_data)
391 391
392 392 self.sa.add(new_user)
393 393 self.sa.flush()
394 394
395 395 user_data = new_user.get_dict()
396 396 kwargs = {
397 397 # use SQLALCHEMY safe dump of user data
398 398 'user': AttributeDict(user_data),
399 399 'date': datetime.datetime.now()
400 400 }
401 401 notification_type = EmailNotificationModel.TYPE_REGISTRATION
402 402 # pre-generate the subject for notification itself
403 403 (subject,
404 404 _h, _e, # we don't care about those
405 405 body_plaintext) = EmailNotificationModel().render_email(
406 406 notification_type, **kwargs)
407 407
408 408 # create notification objects, and emails
409 409 NotificationModel().create(
410 410 created_by=new_user,
411 411 notification_subject=subject,
412 412 notification_body=body_plaintext,
413 413 notification_type=notification_type,
414 414 recipients=None, # all admins
415 415 email_kwargs=kwargs,
416 416 )
417 417
418 418 return new_user
419 419 except Exception:
420 420 log.error(traceback.format_exc())
421 421 raise
422 422
423 423 def _handle_user_repos(self, username, repositories, handle_mode=None):
424 424 _superadmin = self.cls.get_first_super_admin()
425 425 left_overs = True
426 426
427 427 from rhodecode.model.repo import RepoModel
428 428
429 429 if handle_mode == 'detach':
430 430 for obj in repositories:
431 431 obj.user = _superadmin
432 432                 # set description so we know why the super admin now owns
433 433                 # additional repositories that were orphaned!
434 434 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
435 435 self.sa.add(obj)
436 436 left_overs = False
437 437 elif handle_mode == 'delete':
438 438 for obj in repositories:
439 439 RepoModel().delete(obj, forks='detach')
440 440 left_overs = False
441 441
442 442         # if nothing is done we still have leftovers
443 443 return left_overs
444 444
445 445 def _handle_user_repo_groups(self, username, repository_groups,
446 446 handle_mode=None):
447 447 _superadmin = self.cls.get_first_super_admin()
448 448 left_overs = True
449 449
450 450 from rhodecode.model.repo_group import RepoGroupModel
451 451
452 452 if handle_mode == 'detach':
453 453 for r in repository_groups:
454 454 r.user = _superadmin
455 455                 # set description so we know why the super admin now owns
456 456                 # additional repository groups that were orphaned!
457 457 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
458 458 self.sa.add(r)
459 459 left_overs = False
460 460 elif handle_mode == 'delete':
461 461 for r in repository_groups:
462 462 RepoGroupModel().delete(r)
463 463 left_overs = False
464 464
465 465         # if nothing is done we still have leftovers
466 466 return left_overs
467 467
468 468 def _handle_user_user_groups(self, username, user_groups, handle_mode=None):
469 469 _superadmin = self.cls.get_first_super_admin()
470 470 left_overs = True
471 471
472 472 from rhodecode.model.user_group import UserGroupModel
473 473
474 474 if handle_mode == 'detach':
475 475 for r in user_groups:
476 476 for user_user_group_to_perm in r.user_user_group_to_perm:
477 477 if user_user_group_to_perm.user.username == username:
478 478 user_user_group_to_perm.user = _superadmin
479 479 r.user = _superadmin
480 480 # set a description so we know why the super admin now owns
481 481 # these additional user groups that were orphaned !
482 482 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
483 483 self.sa.add(r)
484 484 left_overs = False
485 485 elif handle_mode == 'delete':
486 486 for r in user_groups:
487 487 UserGroupModel().delete(r)
488 488 left_overs = False
489 489
490 490 # if nothing was done we still have leftovers
491 491 return left_overs
492 492
493 493 def delete(self, user, cur_user=None, handle_repos=None,
494 494 handle_repo_groups=None, handle_user_groups=None):
495 495 if not cur_user:
496 496 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
497 497 user = self._get_user(user)
498 498
499 499 try:
500 500 if user.username == User.DEFAULT_USER:
501 501 raise DefaultUserException(
502 502 _(u"You can't remove this user since it's"
503 503 u" crucial for entire application"))
504 504
505 505 left_overs = self._handle_user_repos(
506 506 user.username, user.repositories, handle_repos)
507 507 if left_overs and user.repositories:
508 508 repos = [x.repo_name for x in user.repositories]
509 509 raise UserOwnsReposException(
510 510 _(u'user "%s" still owns %s repositories and cannot be '
511 511 u'removed. Switch owners or remove those repositories:%s')
512 512 % (user.username, len(repos), ', '.join(repos)))
513 513
514 514 left_overs = self._handle_user_repo_groups(
515 515 user.username, user.repository_groups, handle_repo_groups)
516 516 if left_overs and user.repository_groups:
517 517 repo_groups = [x.group_name for x in user.repository_groups]
518 518 raise UserOwnsRepoGroupsException(
519 519 _(u'user "%s" still owns %s repository groups and cannot be '
520 520 u'removed. Switch owners or remove those repository groups:%s')
521 521 % (user.username, len(repo_groups), ', '.join(repo_groups)))
522 522
523 523 left_overs = self._handle_user_user_groups(
524 524 user.username, user.user_groups, handle_user_groups)
525 525 if left_overs and user.user_groups:
526 526 user_groups = [x.users_group_name for x in user.user_groups]
527 527 raise UserOwnsUserGroupsException(
528 528 _(u'user "%s" still owns %s user groups and cannot be '
529 529 u'removed. Switch owners or remove those user groups:%s')
530 530 % (user.username, len(user_groups), ', '.join(user_groups)))
531 531
532 532 # we might change the user data with detach/delete, make sure
533 533 # the object is marked as expired before actually deleting !
534 534 self.sa.expire(user)
535 535 self.sa.delete(user)
536 536 from rhodecode.lib.hooks_base import log_delete_user
537 537 log_delete_user(deleted_by=cur_user, **user.get_dict())
538 538 except Exception:
539 539 log.error(traceback.format_exc())
540 540 raise
541 541
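# Illustrative sketch (not part of this changeset): how delete() above drives
# the three _handle_user_* helpers; the username is made up and `UserModel`
# is assumed to be the enclosing model class.
#
#   >>> UserModel().delete('former-employee',
#   ...                    handle_repos='detach',
#   ...                    handle_repo_groups='detach',
#   ...                    handle_user_groups='delete')
#
# 'detach' re-owns the objects to the first super-admin, 'delete' removes
# them; any mode left as None makes the helper report leftovers, and delete()
# then raises the matching UserOwns...Exception.
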
542 542 def reset_password_link(self, data, pwd_reset_url):
543 543 from rhodecode.lib.celerylib import tasks, run_task
544 544 from rhodecode.model.notification import EmailNotificationModel
545 545 user_email = data['email']
546 546 try:
547 547 user = User.get_by_email(user_email)
548 548 if user:
549 549 log.debug('password reset user found %s', user)
550 550
551 551 email_kwargs = {
552 552 'password_reset_url': pwd_reset_url,
553 553 'user': user,
554 554 'email': user_email,
555 555 'date': datetime.datetime.now()
556 556 }
557 557
558 558 (subject, headers, email_body,
559 559 email_body_plaintext) = EmailNotificationModel().render_email(
560 560 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
561 561
562 562 recipients = [user_email]
563 563
564 564 action_logger_generic(
565 565 'sending password reset email to user: {}'.format(
566 566 user), namespace='security.password_reset')
567 567
568 568 run_task(tasks.send_email, recipients, subject,
569 569 email_body_plaintext, email_body)
570 570
571 571 else:
572 572 log.debug("password reset email %s not found", user_email)
573 573 except Exception:
574 574 log.error(traceback.format_exc())
575 575 return False
576 576
577 577 return True
578 578
579 579 def reset_password(self, data):
580 580 from rhodecode.lib.celerylib import tasks, run_task
581 581 from rhodecode.model.notification import EmailNotificationModel
582 582 from rhodecode.lib import auth
583 583 user_email = data['email']
584 584 pre_db = True
585 585 try:
586 586 user = User.get_by_email(user_email)
587 587 new_passwd = auth.PasswordGenerator().gen_password(
588 588 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
589 589 if user:
590 590 user.password = auth.get_crypt_password(new_passwd)
591 591 # also force this user to reset his password !
592 592 user.update_userdata(force_password_change=True)
593 593
594 594 Session().add(user)
595 595
596 596 # now delete the token in question
597 597 UserApiKeys = AuthTokenModel.cls
598 598 UserApiKeys().query().filter(
599 599 UserApiKeys.api_key == data['token']).delete()
600 600
601 601 Session().commit()
602 602 log.info('successfully reset password for `%s`', user_email)
603 603
604 604 if new_passwd is None:
605 605 raise Exception('unable to generate new password')
606 606
607 607 pre_db = False
608 608
609 609 email_kwargs = {
610 610 'new_password': new_passwd,
611 611 'user': user,
612 612 'email': user_email,
613 613 'date': datetime.datetime.now()
614 614 }
615 615
616 616 (subject, headers, email_body,
617 617 email_body_plaintext) = EmailNotificationModel().render_email(
618 618 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
619 619 **email_kwargs)
620 620
621 621 recipients = [user_email]
622 622
623 623 action_logger_generic(
624 624 'sent new password to user: {} with email: {}'.format(
625 625 user, user_email), namespace='security.password_reset')
626 626
627 627 run_task(tasks.send_email, recipients, subject,
628 628 email_body_plaintext, email_body)
629 629
630 630 except Exception:
631 631 log.error('Failed to update user password')
632 632 log.error(traceback.format_exc())
633 633 if pre_db:
634 634 # we roll back only if the local db changes fail. Once it gets into
635 635 # run_task we're past the rollback point, so it wouldn't help then
636 636 Session().rollback()
637 637
638 638 return True
639 639
640 640 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
641 641 """
642 642 Fetches auth_user by user_id, or api_key if present.
643 643 Fills auth_user attributes with those taken from the database.
644 644 Additionally marks auth_user as not authenticated if the lookup
645 645 cannot find the user in the database
646 646
647 647 :param auth_user: instance of user to set attributes
648 648 :param user_id: user id to fetch by
649 649 :param api_key: api key to fetch by
650 650 :param username: username to fetch by
651 651 """
652 652 if user_id is None and api_key is None and username is None:
653 653 raise Exception('You need to pass user_id, api_key or username')
654 654
655 655 log.debug(
656 656 'doing fill data based on: user_id:%s api_key:%s username:%s',
657 657 user_id, api_key, username)
658 658 try:
659 659 dbuser = None
660 660 if user_id:
661 661 dbuser = self.get(user_id)
662 662 elif api_key:
663 663 dbuser = self.get_by_auth_token(api_key)
664 664 elif username:
665 665 dbuser = self.get_by_username(username)
666 666
667 667 if not dbuser:
668 668 log.warning(
669 669 'Unable to lookup user by id:%s api_key:%s username:%s',
670 670 user_id, api_key, username)
671 671 return False
672 672 if not dbuser.active:
673 673 log.debug('User `%s:%s` is inactive, skipping fill data',
674 674 username, user_id)
675 675 return False
676 676
677 677 log.debug('filling user:%s data', dbuser)
678 678
679 679 # TODO: johbo: Think about this and find a clean solution
680 680 user_data = dbuser.get_dict()
681 681 user_data.update(dbuser.get_api_data(include_secrets=True))
682 682 user_data.update({
683 683 # explicitly set the safe, escaped values
684 684 'first_name': dbuser.first_name,
685 685 'last_name': dbuser.last_name,
686 686 })
687 687
688 688 for k, v in user_data.iteritems():
689 689 # properties of the auth user we don't update
690 690 if k not in ['auth_tokens', 'permissions']:
691 691 setattr(auth_user, k, v)
692 692
693 693 # few extras
694 694 setattr(auth_user, 'feed_token', dbuser.feed_token)
695 695 except Exception:
696 696 log.error(traceback.format_exc())
697 697 auth_user.is_authenticated = False
698 698 return False
699 699
700 700 return True
701 701
702 702 def has_perm(self, user, perm):
703 703 perm = self._get_perm(perm)
704 704 user = self._get_user(user)
705 705
706 706 return UserToPerm.query().filter(UserToPerm.user == user)\
707 707 .filter(UserToPerm.permission == perm).scalar() is not None
708 708
709 709 def grant_perm(self, user, perm):
710 710 """
711 711 Grant user global permissions
712 712
713 713 :param user:
714 714 :param perm:
715 715 """
716 716 user = self._get_user(user)
717 717 perm = self._get_perm(perm)
718 718 # if this permission is already granted skip it
719 719 _perm = UserToPerm.query()\
720 720 .filter(UserToPerm.user == user)\
721 721 .filter(UserToPerm.permission == perm)\
722 722 .scalar()
723 723 if _perm:
724 724 return
725 725 new = UserToPerm()
726 726 new.user = user
727 727 new.permission = perm
728 728 self.sa.add(new)
729 729 return new
730 730
731 731 def revoke_perm(self, user, perm):
732 732 """
733 733 Revoke a user's global permissions
734 734
735 735 :param user:
736 736 :param perm:
737 737 """
738 738 user = self._get_user(user)
739 739 perm = self._get_perm(perm)
740 740
741 741 obj = UserToPerm.query()\
742 742 .filter(UserToPerm.user == user)\
743 743 .filter(UserToPerm.permission == perm)\
744 744 .scalar()
745 745 if obj:
746 746 self.sa.delete(obj)
747 747
748 748 def add_extra_email(self, user, email):
749 749 """
750 750 Adds email address to UserEmailMap
751 751
752 752 :param user:
753 753 :param email:
754 754 """
755 755 from rhodecode.model import forms
756 756 form = forms.UserExtraEmailForm()()
757 757 data = form.to_python({'email': email})
758 758 user = self._get_user(user)
759 759
760 760 obj = UserEmailMap()
761 761 obj.user = user
762 762 obj.email = data['email']
763 763 self.sa.add(obj)
764 764 return obj
765 765
766 766 def delete_extra_email(self, user, email_id):
767 767 """
768 768 Removes email address from UserEmailMap
769 769
770 770 :param user:
771 771 :param email_id:
772 772 """
773 773 user = self._get_user(user)
774 774 obj = UserEmailMap.query().get(email_id)
775 775 if obj and obj.user_id == user.user_id:
776 776 self.sa.delete(obj)
777 777
778 778 def parse_ip_range(self, ip_range):
779 779 ip_list = []
780
780 781 def make_unique(value):
781 782 seen = []
782 783 return [c for c in value if not (c in seen or seen.append(c))]
783 784
784 785 # first split by commas
785 786 for ip_range in ip_range.split(','):
786 787 if not ip_range:
787 788 continue
788 789 ip_range = ip_range.strip()
789 790 if '-' in ip_range:
790 791 start_ip, end_ip = ip_range.split('-', 1)
791 start_ip = ipaddress.ip_address(start_ip.strip())
792 end_ip = ipaddress.ip_address(end_ip.strip())
792 start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip()))
793 end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip()))
793 794 parsed_ip_range = []
794 795
795 796 for index in xrange(int(start_ip), int(end_ip) + 1):
796 797 new_ip = ipaddress.ip_address(index)
797 798 parsed_ip_range.append(str(new_ip))
798 799 ip_list.extend(parsed_ip_range)
799 800 else:
800 801 ip_list.append(ip_range)
801 802
802 803 return make_unique(ip_list)
803 804
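# Illustrative sketch (not part of this changeset): what parse_ip_range()
# above produces for a mixed input; the addresses are made up and `UserModel`
# is assumed to be the enclosing model class.
#
#   >>> UserModel().parse_ip_range('127.0.0.1, 10.0.0.1-10.0.0.3')
#   ['127.0.0.1', '10.0.0.1', '10.0.0.2', '10.0.0.3']
#
# ranges are expanded inclusively and duplicates are dropped while keeping
# the original order.
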
804 805 def add_extra_ip(self, user, ip, description=None):
805 806 """
806 807 Adds ip address to UserIpMap
807 808
808 809 :param user:
809 810 :param ip:
810 811 """
811 812 from rhodecode.model import forms
812 813 form = forms.UserExtraIpForm()()
813 814 data = form.to_python({'ip': ip})
814 815 user = self._get_user(user)
815 816
816 817 obj = UserIpMap()
817 818 obj.user = user
818 819 obj.ip_addr = data['ip']
819 820 obj.description = description
820 821 self.sa.add(obj)
821 822 return obj
822 823
823 824 def delete_extra_ip(self, user, ip_id):
824 825 """
825 826 Removes ip address from UserIpMap
826 827
827 828 :param user:
828 829 :param ip_id:
829 830 """
830 831 user = self._get_user(user)
831 832 obj = UserIpMap.query().get(ip_id)
832 833 if obj and obj.user_id == user.user_id:
833 834 self.sa.delete(obj)
834 835
835 836 def get_accounts_in_creation_order(self, current_user=None):
836 837 """
837 838 Get accounts in order of creation, for deactivation under license limits
838 839
839 840 pick the currently logged in user and put it at position 0,
840 841 pick all super-admins in order of creation date and add them to the list,
841 842 pick all other accounts in order of creation and add them to the list.
842 843
843 844 Based on that list, the last accounts can be disabled, as they were
844 845 created most recently and the tail of the list never includes the
845 846 super admins or the current user.
846 847
847 848 :param current_user: optionally current user running this operation
848 849 """
849 850
850 851 if not current_user:
851 852 current_user = get_current_rhodecode_user()
852 853 active_super_admins = [
853 854 x.user_id for x in User.query()
854 855 .filter(User.user_id != current_user.user_id)
855 856 .filter(User.active == true())
856 857 .filter(User.admin == true())
857 858 .order_by(User.created_on.asc())]
858 859
859 860 active_regular_users = [
860 861 x.user_id for x in User.query()
861 862 .filter(User.user_id != current_user.user_id)
862 863 .filter(User.active == true())
863 864 .filter(User.admin == false())
864 865 .order_by(User.created_on.asc())]
865 866
866 867 list_of_accounts = [current_user.user_id]
867 868 list_of_accounts += active_super_admins
868 869 list_of_accounts += active_regular_users
869 870
870 871 return list_of_accounts
871 872
872 873 def deactivate_last_users(self, expected_users):
873 874 """
874 875 Deactivate accounts that are over the license limits.
875 876 The algorithm for deciding which accounts to disable is:
876 877
877 878 Get current user, then super admins in creation order, then regular
878 879 active users in creation order.
879 880
880 881 Using that list we mark all accounts from the end of it as inactive.
881 882 This way we block only the most recently created accounts.
882 883
883 884 :param expected_users: number of users expected to stay active; we
884 885 deactivate the accounts beyond that count at the end of the list
885 886 """
886 887
887 888 list_of_accounts = self.get_accounts_in_creation_order()
888 889
889 890 for acc_id in list_of_accounts[expected_users + 1:]:
890 891 user = User.get(acc_id)
891 892 log.info('Deactivating account %s for license unlock', user)
892 893 user.active = False
893 894 Session().add(user)
894 895 Session().commit()
895 896
896 897 return
897 898
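# Illustrative sketch (not part of this changeset): the slicing used by
# deactivate_last_users() above, shown on a plain list; the user ids are
# made up.
#
#   >>> ordered = [7, 1, 2, 3, 4, 5]   # current user, admins, regular users
#   >>> expected_users = 3
#   >>> ordered[expected_users + 1:]   # accounts that get deactivated
#   [4, 5]
#
# i.e. the current user plus the first `expected_users` accounts in creation
# order stay active; everything newer is switched off.
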
898 899 def get_user_log(self, user, filter_term):
899 900 user_log = UserLog.query()\
900 901 .filter(or_(UserLog.user_id == user.user_id,
901 902 UserLog.username == user.username))\
902 903 .options(joinedload(UserLog.user))\
903 904 .options(joinedload(UserLog.repository))\
904 905 .order_by(UserLog.action_date.desc())
905 906
906 907 user_log = user_log_filter(user_log, filter_term)
907 908 return user_log
@@ -1,140 +1,140 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import re
23 23 import logging
24 24
25 25
26 26 import ipaddress
27 27 import colander
28 28
29 29 from rhodecode.translation import _
30 from rhodecode.lib.utils2 import glob2re
30 from rhodecode.lib.utils2 import glob2re, safe_unicode
31 31
32 32 log = logging.getLogger(__name__)
33 33
34 34
35 35 def ip_addr_validator(node, value):
36 36 try:
37 37 # this raises a ValueError if the address is not IPv4 or IPv6
38 ipaddress.ip_network(value, strict=False)
38 ipaddress.ip_network(safe_unicode(value), strict=False)
39 39 except ValueError:
40 40 msg = _(u'Please enter a valid IPv4 or IPv6 address')
41 41 raise colander.Invalid(node, msg)
42 42
43 43
44 44 class IpAddrValidator(object):
45 45 def __init__(self, strict=True):
46 46 self.strict = strict
47 47
48 48 def __call__(self, node, value):
49 49 try:
50 50 # this raises a ValueError if the address is not IPv4 or IPv6
51 ipaddress.ip_network(value, strict=self.strict)
51 ipaddress.ip_network(safe_unicode(value), strict=self.strict)
52 52 except ValueError:
53 53 msg = _(u'Please enter a valid IPv4 or IPv6 address')
54 54 raise colander.Invalid(node, msg)
55 55
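# Illustrative sketch (not part of this changeset): the reason for the
# safe_unicode() coercion above - on Python 2 the `ipaddress` backport expects
# unicode text, and the byte strings coming from web forms can make it raise
# AddressValueError (a ValueError subclass), e.g.:
#
#   >>> import ipaddress
#   >>> ipaddress.ip_network(u'192.168.0.0/24', strict=False)   # fine
#   >>> ipaddress.ip_network(b'192.168.0.0/24', strict=False)   # may raise
#
# coercing form input to unicode first keeps the except ValueError branches
# above working as intended.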
56 56
57 57 def glob_validator(node, value):
58 58 try:
59 59 re.compile('^' + glob2re(value) + '$')
60 60 except Exception:
61 61 msg = _(u'Invalid glob pattern')
62 62 raise colander.Invalid(node, msg)
63 63
64 64
65 65 def valid_name_validator(node, value):
66 66 from rhodecode.model.validation_schema import types
67 67 if value is types.RootLocation:
68 68 return
69 69
70 70 msg = _('Name must start with a letter or number. Got `{}`').format(value)
71 71 if not re.match(r'^[a-zA-z0-9]{1,}', value):
72 72 raise colander.Invalid(node, msg)
73 73
74 74
75 75 class InvalidCloneUrl(Exception):
76 76 allowed_prefixes = ()
77 77
78 78
79 79 def url_validator(url, repo_type, config):
80 80 from rhodecode.lib.vcs.backends.hg import MercurialRepository
81 81 from rhodecode.lib.vcs.backends.git import GitRepository
82 82 from rhodecode.lib.vcs.backends.svn import SubversionRepository
83 83
84 84 if repo_type == 'hg':
85 85 allowed_prefixes = ('http', 'svn+http', 'git+http')
86 86
87 87 if 'http' in url[:4]:
88 88 # initially check if it's at least a proper URL
89 89 # and whether it passes basic auth
90 90
91 91 MercurialRepository.check_url(url, config)
92 92 elif 'svn+http' in url[:8]: # svn->hg import
93 93 SubversionRepository.check_url(url, config)
94 94 elif 'git+http' in url[:8]: # git->hg import
95 95 raise NotImplementedError()
96 96 else:
97 97 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
98 98 'Allowed url must start with one of %s'
99 99 % (url, ','.join(allowed_prefixes)))
100 100 exc.allowed_prefixes = allowed_prefixes
101 101 raise exc
102 102
103 103 elif repo_type == 'git':
104 104 allowed_prefixes = ('http', 'svn+http', 'hg+http')
105 105 if 'http' in url[:4]:
106 106 # initially check if it's at least a proper URL
107 107 # and whether it passes basic auth
108 108 GitRepository.check_url(url, config)
109 109 elif 'svn+http' in url[:8]: # svn->git import
110 110 raise NotImplementedError()
111 111 elif 'hg+http' in url[:8]: # hg->git import
112 112 raise NotImplementedError()
113 113 else:
114 114 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
115 115 'Allowed url must start with one of %s'
116 116 % (url, ','.join(allowed_prefixes)))
117 117 exc.allowed_prefixes = allowed_prefixes
118 118 raise exc
119 119
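# Illustrative sketch (not part of this changeset): the clone-url prefixes
# url_validator() above accepts per repository type; the URLs are made up.
#
#   hg  <- http://... or https://...  (checked via MercurialRepository.check_url)
#          svn+http://...             (svn -> hg import)
#          git+http://...             (not implemented yet)
#   git <- http://... or https://...  (checked via GitRepository.check_url)
#          svn+http://... and hg+http://... (both not implemented yet)
#
# anything else raises InvalidCloneUrl with the allowed prefixes attached.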
120 120
121 121 class CloneUriValidator(object):
122 122 def __init__(self, repo_type):
123 123 self.repo_type = repo_type
124 124
125 125 def __call__(self, node, value):
126 126 from rhodecode.lib.utils import make_db_config
127 127 try:
128 128 config = make_db_config(clear_session=False)
129 129 url_validator(value, self.repo_type, config)
130 130 except InvalidCloneUrl as e:
131 131 log.warning(e)
132 132 msg = _(u'Invalid clone url, provide a valid clone '
133 133 u'url starting with one of {allowed_prefixes}').format(
134 134 allowed_prefixes=e.allowed_prefixes)
135 135 raise colander.Invalid(node, msg)
136 136 except Exception:
137 137 log.exception('Url validation failed')
138 138 msg = _(u'invalid clone url for {repo_type} repository').format(
139 139 repo_type=self.repo_type)
140 140 raise colander.Invalid(node, msg)
@@ -1,1122 +1,1122 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Set of generic validators
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28 from collections import defaultdict
29 29
30 30 import formencode
31 31 import ipaddress
32 32 from formencode.validators import (
33 33 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set,
34 34 NotEmpty, IPAddress, CIDR, String, FancyValidator
35 35 )
36 36 from pylons.i18n.translation import _
37 37 from sqlalchemy.sql.expression import true
38 38 from sqlalchemy.util import OrderedSet
39 39 from webhelpers.pylonslib.secure_form import authentication_token
40 40
41 41 from rhodecode.authentication import (
42 42 legacy_plugin_prefix, _import_legacy_plugin)
43 43 from rhodecode.authentication.base import loadplugin
44 44 from rhodecode.config.routing import ADMIN_PREFIX
45 45 from rhodecode.lib.auth import HasRepoGroupPermissionAny, HasPermissionAny
46 46 from rhodecode.lib.utils import repo_name_slug, make_db_config
47 from rhodecode.lib.utils2 import safe_int, str2bool, aslist, md5
47 from rhodecode.lib.utils2 import safe_int, str2bool, aslist, md5, safe_unicode
48 48 from rhodecode.lib.vcs.backends.git.repository import GitRepository
49 49 from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository
50 50 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
51 51 from rhodecode.model.db import (
52 52 RepoGroup, Repository, UserGroup, User, ChangesetStatus, Gist)
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54
55 55 # silence warnings and pylint
56 56 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, \
57 57 NotEmpty, IPAddress, CIDR, String, FancyValidator
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61
62 62 class _Missing(object):
63 63 pass
64 64
65 65 Missing = _Missing()
66 66
67 67
68 68 class StateObj(object):
69 69 """
70 70 this is needed to translate the messages using _() in validators
71 71 """
72 72 _ = staticmethod(_)
73 73
74 74
75 75 def M(self, key, state=None, **kwargs):
76 76 """
77 77 returns a string from self.message based on the given key;
78 78 the passed kw params are used to substitute %(named)s params inside
79 79 the translated strings
80 80
81 81 :param key:
82 82 :param state:
83 83 """
84 84 if state is None:
85 85 state = StateObj()
86 86 else:
87 87 state._ = staticmethod(_)
88 88 # inject the translator into the state object
89 89 return self.message(key, state, **kwargs)
90 90
91 91
92 92 def UniqueList(convert=None):
93 93 class _UniqueList(formencode.FancyValidator):
94 94 """
95 95 Unique List !
96 96 """
97 97 messages = {
98 98 'empty': _(u'Value cannot be an empty list'),
99 99 'missing_value': _(u'Value cannot be an empty list'),
100 100 }
101 101
102 102 def _to_python(self, value, state):
103 103 ret_val = []
104 104
105 105 def make_unique(value):
106 106 seen = []
107 107 return [c for c in value if not (c in seen or seen.append(c))]
108 108
109 109 if isinstance(value, list):
110 110 ret_val = make_unique(value)
111 111 elif isinstance(value, set):
112 112 ret_val = make_unique(list(value))
113 113 elif isinstance(value, tuple):
114 114 ret_val = make_unique(list(value))
115 115 elif value is None:
116 116 ret_val = []
117 117 else:
118 118 ret_val = [value]
119 119
120 120 if convert:
121 121 ret_val = map(convert, ret_val)
122 122 return ret_val
123 123
124 124 def empty_value(self, value):
125 125 return []
126 126
127 127 return _UniqueList
128 128
129 129
130 130 def UniqueListFromString():
131 131 class _UniqueListFromString(UniqueList()):
132 132 def _to_python(self, value, state):
133 133 if isinstance(value, basestring):
134 134 value = aslist(value, ',')
135 135 return super(_UniqueListFromString, self)._to_python(value, state)
136 136 return _UniqueListFromString
137 137
138 138
139 139 def ValidSvnPattern(section, repo_name=None):
140 140 class _validator(formencode.validators.FancyValidator):
141 141 messages = {
142 142 'pattern_exists': _(u'Pattern already exists'),
143 143 }
144 144
145 145 def validate_python(self, value, state):
146 146 if not value:
147 147 return
148 148 model = VcsSettingsModel(repo=repo_name)
149 149 ui_settings = model.get_svn_patterns(section=section)
150 150 for entry in ui_settings:
151 151 if value == entry.value:
152 152 msg = M(self, 'pattern_exists', state)
153 153 raise formencode.Invalid(msg, value, state)
154 154 return _validator
155 155
156 156
157 157 def ValidUsername(edit=False, old_data={}):
158 158 class _validator(formencode.validators.FancyValidator):
159 159 messages = {
160 160 'username_exists': _(u'Username "%(username)s" already exists'),
161 161 'system_invalid_username':
162 162 _(u'Username "%(username)s" is forbidden'),
163 163 'invalid_username':
164 164 _(u'Username may only contain alphanumeric characters '
165 165 u'underscores, periods or dashes and must begin with '
166 166 u'alphanumeric character or underscore')
167 167 }
168 168
169 169 def validate_python(self, value, state):
170 170 if value in ['default', 'new_user']:
171 171 msg = M(self, 'system_invalid_username', state, username=value)
172 172 raise formencode.Invalid(msg, value, state)
173 173 # check if user is unique
174 174 old_un = None
175 175 if edit:
176 176 old_un = User.get(old_data.get('user_id')).username
177 177
178 178 if old_un != value or not edit:
179 179 if User.get_by_username(value, case_insensitive=True):
180 180 msg = M(self, 'username_exists', state, username=value)
181 181 raise formencode.Invalid(msg, value, state)
182 182
183 183 if (re.match(r'^[\w]{1}[\w\-\.]{0,254}$', value)
184 184 is None):
185 185 msg = M(self, 'invalid_username', state)
186 186 raise formencode.Invalid(msg, value, state)
187 187 return _validator
188 188
189 189
190 190 def ValidRegex(msg=None):
191 191 class _validator(formencode.validators.Regex):
192 192 messages = {'invalid': msg or _(u'The input is not valid')}
193 193 return _validator
194 194
195 195
196 196 def ValidRepoUser(allow_disabled=False):
197 197 class _validator(formencode.validators.FancyValidator):
198 198 messages = {
199 199 'invalid_username': _(u'Username %(username)s is not valid'),
200 200 'disabled_username': _(u'Username %(username)s is disabled')
201 201 }
202 202
203 203 def validate_python(self, value, state):
204 204 try:
205 205 user = User.query().filter(User.username == value).one()
206 206 except Exception:
207 207 msg = M(self, 'invalid_username', state, username=value)
208 208 raise formencode.Invalid(
209 209 msg, value, state, error_dict={'username': msg}
210 210 )
211 211 if user and (not allow_disabled and not user.active):
212 212 msg = M(self, 'disabled_username', state, username=value)
213 213 raise formencode.Invalid(
214 214 msg, value, state, error_dict={'username': msg}
215 215 )
216 216
217 217 return _validator
218 218
219 219
220 220 def ValidUserGroup(edit=False, old_data={}):
221 221 class _validator(formencode.validators.FancyValidator):
222 222 messages = {
223 223 'invalid_group': _(u'Invalid user group name'),
224 224 'group_exist': _(u'User group "%(usergroup)s" already exists'),
225 225 'invalid_usergroup_name':
226 226 _(u'user group name may only contain alphanumeric '
227 227 u'characters underscores, periods or dashes and must begin '
228 228 u'with alphanumeric character')
229 229 }
230 230
231 231 def validate_python(self, value, state):
232 232 if value in ['default']:
233 233 msg = M(self, 'invalid_group', state)
234 234 raise formencode.Invalid(
235 235 msg, value, state, error_dict={'users_group_name': msg}
236 236 )
237 237 # check if group is unique
238 238 old_ugname = None
239 239 if edit:
240 240 old_id = old_data.get('users_group_id')
241 241 old_ugname = UserGroup.get(old_id).users_group_name
242 242
243 243 if old_ugname != value or not edit:
244 244 is_existing_group = UserGroup.get_by_group_name(
245 245 value, case_insensitive=True)
246 246 if is_existing_group:
247 247 msg = M(self, 'group_exist', state, usergroup=value)
248 248 raise formencode.Invalid(
249 249 msg, value, state, error_dict={'users_group_name': msg}
250 250 )
251 251
252 252 if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None:
253 253 msg = M(self, 'invalid_usergroup_name', state)
254 254 raise formencode.Invalid(
255 255 msg, value, state, error_dict={'users_group_name': msg}
256 256 )
257 257
258 258 return _validator
259 259
260 260
261 261 def ValidRepoGroup(edit=False, old_data={}, can_create_in_root=False):
262 262 class _validator(formencode.validators.FancyValidator):
263 263 messages = {
264 264 'group_parent_id': _(u'Cannot assign this group as parent'),
265 265 'group_exists': _(u'Group "%(group_name)s" already exists'),
266 266 'repo_exists': _(u'Repository with name "%(group_name)s" '
267 267 u'already exists'),
268 268 'permission_denied': _(u"no permission to store repository group"
269 269 u"in this location"),
270 270 'permission_denied_root': _(
271 271 u"no permission to store repository group "
272 272 u"in root location")
273 273 }
274 274
275 275 def _to_python(self, value, state):
276 276 group_name = repo_name_slug(value.get('group_name', ''))
277 277 group_parent_id = safe_int(value.get('group_parent_id'))
278 278 gr = RepoGroup.get(group_parent_id)
279 279 if gr:
280 280 parent_group_path = gr.full_path
281 281 # value needs to be aware of the group name in order to check
282 282 # the db key. This is actually just the name to store in the
283 283 # database
284 284 group_name_full = (
285 285 parent_group_path + RepoGroup.url_sep() + group_name)
286 286 else:
287 287 group_name_full = group_name
288 288
289 289 value['group_name'] = group_name
290 290 value['group_name_full'] = group_name_full
291 291 value['group_parent_id'] = group_parent_id
292 292 return value
293 293
294 294 def validate_python(self, value, state):
295 295
296 296 old_group_name = None
297 297 group_name = value.get('group_name')
298 298 group_name_full = value.get('group_name_full')
299 299 group_parent_id = safe_int(value.get('group_parent_id'))
300 300 if group_parent_id == -1:
301 301 group_parent_id = None
302 302
303 303 group_obj = RepoGroup.get(old_data.get('group_id'))
304 304 parent_group_changed = False
305 305 if edit:
306 306 old_group_name = group_obj.group_name
307 307 old_group_parent_id = group_obj.group_parent_id
308 308
309 309 if group_parent_id != old_group_parent_id:
310 310 parent_group_changed = True
311 311
312 312 # TODO: mikhail: the following if statement is not reached
313 313 # since group_parent_id's OneOf validation fails before.
314 314 # Can be removed.
315 315
316 316 # check against setting a parent of self
317 317 parent_of_self = (
318 318 old_data['group_id'] == group_parent_id
319 319 if group_parent_id else False
320 320 )
321 321 if parent_of_self:
322 322 msg = M(self, 'group_parent_id', state)
323 323 raise formencode.Invalid(
324 324 msg, value, state, error_dict={'group_parent_id': msg}
325 325 )
326 326
327 327 # group we're moving current group inside
328 328 child_group = None
329 329 if group_parent_id:
330 330 child_group = RepoGroup.query().filter(
331 331 RepoGroup.group_id == group_parent_id).scalar()
332 332
333 333 # do a special check that we cannot move a group to one of
334 334 # its own children
335 335 if edit and child_group:
336 336 parents = [x.group_id for x in child_group.parents]
337 337 move_to_children = old_data['group_id'] in parents
338 338 if move_to_children:
339 339 msg = M(self, 'group_parent_id', state)
340 340 raise formencode.Invalid(
341 341 msg, value, state, error_dict={'group_parent_id': msg})
342 342
343 343 # Check if we have permission to store in the parent.
344 344 # Only check if the parent group changed.
345 345 if parent_group_changed:
346 346 if child_group is None:
347 347 if not can_create_in_root:
348 348 msg = M(self, 'permission_denied_root', state)
349 349 raise formencode.Invalid(
350 350 msg, value, state,
351 351 error_dict={'group_parent_id': msg})
352 352 else:
353 353 valid = HasRepoGroupPermissionAny('group.admin')
354 354 forbidden = not valid(
355 355 child_group.group_name, 'can create group validator')
356 356 if forbidden:
357 357 msg = M(self, 'permission_denied', state)
358 358 raise formencode.Invalid(
359 359 msg, value, state,
360 360 error_dict={'group_parent_id': msg})
361 361
362 362 # if we change the name or it's new group, check for existing names
363 363 # or repositories with the same name
364 364 if old_group_name != group_name_full or not edit:
365 365 # check group
366 366 gr = RepoGroup.get_by_group_name(group_name_full)
367 367 if gr:
368 368 msg = M(self, 'group_exists', state, group_name=group_name)
369 369 raise formencode.Invalid(
370 370 msg, value, state, error_dict={'group_name': msg})
371 371
372 372 # check for same repo
373 373 repo = Repository.get_by_repo_name(group_name_full)
374 374 if repo:
375 375 msg = M(self, 'repo_exists', state, group_name=group_name)
376 376 raise formencode.Invalid(
377 377 msg, value, state, error_dict={'group_name': msg})
378 378
379 379 return _validator
380 380
381 381
382 382 def ValidPassword():
383 383 class _validator(formencode.validators.FancyValidator):
384 384 messages = {
385 385 'invalid_password':
386 386 _(u'Invalid characters (non-ascii) in password')
387 387 }
388 388
389 389 def validate_python(self, value, state):
390 390 try:
391 391 (value or '').decode('ascii')
392 392 except UnicodeError:
393 393 msg = M(self, 'invalid_password', state)
394 394 raise formencode.Invalid(msg, value, state,)
395 395 return _validator
396 396
397 397
398 398 def ValidOldPassword(username):
399 399 class _validator(formencode.validators.FancyValidator):
400 400 messages = {
401 401 'invalid_password': _(u'Invalid old password')
402 402 }
403 403
404 404 def validate_python(self, value, state):
405 405 from rhodecode.authentication.base import authenticate, HTTP_TYPE
406 406 if not authenticate(username, value, '', HTTP_TYPE):
407 407 msg = M(self, 'invalid_password', state)
408 408 raise formencode.Invalid(
409 409 msg, value, state, error_dict={'current_password': msg}
410 410 )
411 411 return _validator
412 412
413 413
414 414 def ValidPasswordsMatch(
415 415 passwd='new_password', passwd_confirmation='password_confirmation'):
416 416 class _validator(formencode.validators.FancyValidator):
417 417 messages = {
418 418 'password_mismatch': _(u'Passwords do not match'),
419 419 }
420 420
421 421 def validate_python(self, value, state):
422 422
423 423 pass_val = value.get('password') or value.get(passwd)
424 424 if pass_val != value[passwd_confirmation]:
425 425 msg = M(self, 'password_mismatch', state)
426 426 raise formencode.Invalid(
427 427 msg, value, state,
428 428 error_dict={passwd: msg, passwd_confirmation: msg}
429 429 )
430 430 return _validator
431 431
432 432
433 433 def ValidAuth():
434 434 class _validator(formencode.validators.FancyValidator):
435 435 messages = {
436 436 'invalid_password': _(u'invalid password'),
437 437 'invalid_username': _(u'invalid user name'),
438 438 'disabled_account': _(u'Your account is disabled')
439 439 }
440 440
441 441 def validate_python(self, value, state):
442 442 from rhodecode.authentication.base import authenticate, HTTP_TYPE
443 443
444 444 password = value['password']
445 445 username = value['username']
446 446
447 447 if not authenticate(username, password, '', HTTP_TYPE,
448 448 skip_missing=True):
449 449 user = User.get_by_username(username)
450 450 if user and not user.active:
451 451 log.warning('user %s is disabled', username)
452 452 msg = M(self, 'disabled_account', state)
453 453 raise formencode.Invalid(
454 454 msg, value, state, error_dict={'username': msg}
455 455 )
456 456 else:
457 457 log.warning('user `%s` failed to authenticate', username)
458 458 msg = M(self, 'invalid_username', state)
459 459 msg2 = M(self, 'invalid_password', state)
460 460 raise formencode.Invalid(
461 461 msg, value, state,
462 462 error_dict={'username': msg, 'password': msg2}
463 463 )
464 464 return _validator
465 465
466 466
467 467 def ValidAuthToken():
468 468 class _validator(formencode.validators.FancyValidator):
469 469 messages = {
470 470 'invalid_token': _(u'Token mismatch')
471 471 }
472 472
473 473 def validate_python(self, value, state):
474 474 if value != authentication_token():
475 475 msg = M(self, 'invalid_token', state)
476 476 raise formencode.Invalid(msg, value, state)
477 477 return _validator
478 478
479 479
480 480 def ValidRepoName(edit=False, old_data={}):
481 481 class _validator(formencode.validators.FancyValidator):
482 482 messages = {
483 483 'invalid_repo_name':
484 484 _(u'Repository name %(repo)s is disallowed'),
485 485 # top level
486 486 'repository_exists': _(u'Repository with name %(repo)s '
487 487 u'already exists'),
488 488 'group_exists': _(u'Repository group with name "%(repo)s" '
489 489 u'already exists'),
490 490 # inside a group
491 491 'repository_in_group_exists': _(u'Repository with name %(repo)s '
492 492 u'exists in group "%(group)s"'),
493 493 'group_in_group_exists': _(
494 494 u'Repository group with name "%(repo)s" '
495 495 u'exists in group "%(group)s"'),
496 496 }
497 497
498 498 def _to_python(self, value, state):
499 499 repo_name = repo_name_slug(value.get('repo_name', ''))
500 500 repo_group = value.get('repo_group')
501 501 if repo_group:
502 502 gr = RepoGroup.get(repo_group)
503 503 group_path = gr.full_path
504 504 group_name = gr.group_name
505 505 # value needs to be aware of the group name in order to check
506 506 # the db key. This is actually just the name to store in the
507 507 # database
508 508 repo_name_full = group_path + RepoGroup.url_sep() + repo_name
509 509 else:
510 510 group_name = group_path = ''
511 511 repo_name_full = repo_name
512 512
513 513 value['repo_name'] = repo_name
514 514 value['repo_name_full'] = repo_name_full
515 515 value['group_path'] = group_path
516 516 value['group_name'] = group_name
517 517 return value
518 518
519 519 def validate_python(self, value, state):
520 520
521 521 repo_name = value.get('repo_name')
522 522 repo_name_full = value.get('repo_name_full')
523 523 group_path = value.get('group_path')
524 524 group_name = value.get('group_name')
525 525
526 526 if repo_name in [ADMIN_PREFIX, '']:
527 527 msg = M(self, 'invalid_repo_name', state, repo=repo_name)
528 528 raise formencode.Invalid(
529 529 msg, value, state, error_dict={'repo_name': msg})
530 530
531 531 rename = old_data.get('repo_name') != repo_name_full
532 532 create = not edit
533 533 if rename or create:
534 534
535 535 if group_path:
536 536 if Repository.get_by_repo_name(repo_name_full):
537 537 msg = M(self, 'repository_in_group_exists', state,
538 538 repo=repo_name, group=group_name)
539 539 raise formencode.Invalid(
540 540 msg, value, state, error_dict={'repo_name': msg})
541 541 if RepoGroup.get_by_group_name(repo_name_full):
542 542 msg = M(self, 'group_in_group_exists', state,
543 543 repo=repo_name, group=group_name)
544 544 raise formencode.Invalid(
545 545 msg, value, state, error_dict={'repo_name': msg})
546 546 else:
547 547 if RepoGroup.get_by_group_name(repo_name_full):
548 548 msg = M(self, 'group_exists', state, repo=repo_name)
549 549 raise formencode.Invalid(
550 550 msg, value, state, error_dict={'repo_name': msg})
551 551
552 552 if Repository.get_by_repo_name(repo_name_full):
553 553 msg = M(
554 554 self, 'repository_exists', state, repo=repo_name)
555 555 raise formencode.Invalid(
556 556 msg, value, state, error_dict={'repo_name': msg})
557 557 return value
558 558 return _validator
559 559
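# Illustrative sketch (not part of this changeset): the full-name composition
# done in _to_python() above, with made-up values; RepoGroup.url_sep() is
# assumed to return '/'.
#
#   group_path = 'libs/python'     # gr.full_path of the selected repo group
#   repo_name = 'my-repo'          # slugified form input
#   repo_name_full = 'libs/python/my-repo'
#
# the *_full name is what gets checked against existing repositories and
# repository groups in validate_python().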
560 560
561 561 def ValidForkName(*args, **kwargs):
562 562 return ValidRepoName(*args, **kwargs)
563 563
564 564
565 565 def SlugifyName():
566 566 class _validator(formencode.validators.FancyValidator):
567 567
568 568 def _to_python(self, value, state):
569 569 return repo_name_slug(value)
570 570
571 571 def validate_python(self, value, state):
572 572 pass
573 573
574 574 return _validator
575 575
576 576
577 577 def CannotHaveGitSuffix():
578 578 class _validator(formencode.validators.FancyValidator):
579 579 messages = {
580 580 'has_git_suffix':
581 581 _(u'Repository name cannot end with .git'),
582 582 }
583 583
584 584 def _to_python(self, value, state):
585 585 return value
586 586
587 587 def validate_python(self, value, state):
588 588 if value and value.endswith('.git'):
589 589 msg = M(
590 590 self, 'has_git_suffix', state)
591 591 raise formencode.Invalid(
592 592 msg, value, state, error_dict={'repo_name': msg})
593 593
594 594 return _validator
595 595
596 596
597 597 def ValidCloneUri():
598 598 class InvalidCloneUrl(Exception):
599 599 allowed_prefixes = ()
600 600
601 601 def url_handler(repo_type, url):
602 602 config = make_db_config(clear_session=False)
603 603 if repo_type == 'hg':
604 604 allowed_prefixes = ('http', 'svn+http', 'git+http')
605 605
606 606 if 'http' in url[:4]:
607 607 # initially check if it's at least a proper URL
608 608 # and whether it passes basic auth
609 609 MercurialRepository.check_url(url, config)
610 610 elif 'svn+http' in url[:8]: # svn->hg import
611 611 SubversionRepository.check_url(url, config)
612 612 elif 'git+http' in url[:8]: # git->hg import
613 613 raise NotImplementedError()
614 614 else:
615 615 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
616 616 'Allowed url must start with one of %s'
617 617 % (url, ','.join(allowed_prefixes)))
618 618 exc.allowed_prefixes = allowed_prefixes
619 619 raise exc
620 620
621 621 elif repo_type == 'git':
622 622 allowed_prefixes = ('http', 'svn+http', 'hg+http')
623 623 if 'http' in url[:4]:
624 624 # initially check if it's at least a proper URL
625 625 # and whether it passes basic auth
626 626 GitRepository.check_url(url, config)
627 627 elif 'svn+http' in url[:8]: # svn->git import
628 628 raise NotImplementedError()
629 629 elif 'hg+http' in url[:8]: # hg->git import
630 630 raise NotImplementedError()
631 631 else:
632 632 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
633 633 'Allowed url must start with one of %s'
634 634 % (url, ','.join(allowed_prefixes)))
635 635 exc.allowed_prefixes = allowed_prefixes
636 636 raise exc
637 637
638 638 class _validator(formencode.validators.FancyValidator):
639 639 messages = {
640 640 'clone_uri': _(u'invalid clone url for %(rtype)s repository'),
641 641 'invalid_clone_uri': _(
642 642 u'Invalid clone url, provide a valid clone '
643 643 u'url starting with one of %(allowed_prefixes)s')
644 644 }
645 645
646 646 def validate_python(self, value, state):
647 647 repo_type = value.get('repo_type')
648 648 url = value.get('clone_uri')
649 649
650 650 if url:
651 651 try:
652 652 url_handler(repo_type, url)
653 653 except InvalidCloneUrl as e:
654 654 log.warning(e)
655 655 msg = M(self, 'invalid_clone_uri', rtype=repo_type,
656 656 allowed_prefixes=','.join(e.allowed_prefixes))
657 657 raise formencode.Invalid(msg, value, state,
658 658 error_dict={'clone_uri': msg})
659 659 except Exception:
660 660 log.exception('Url validation failed')
661 661 msg = M(self, 'clone_uri', rtype=repo_type)
662 662 raise formencode.Invalid(msg, value, state,
663 663 error_dict={'clone_uri': msg})
664 664 return _validator
665 665
666 666
667 667 def ValidForkType(old_data={}):
668 668 class _validator(formencode.validators.FancyValidator):
669 669 messages = {
670 670 'invalid_fork_type': _(u'Fork has to be the same type as parent')
671 671 }
672 672
673 673 def validate_python(self, value, state):
674 674 if old_data['repo_type'] != value:
675 675 msg = M(self, 'invalid_fork_type', state)
676 676 raise formencode.Invalid(
677 677 msg, value, state, error_dict={'repo_type': msg}
678 678 )
679 679 return _validator
680 680
681 681
682 682 def CanWriteGroup(old_data=None):
683 683 class _validator(formencode.validators.FancyValidator):
684 684 messages = {
685 685 'permission_denied': _(
686 686 u"You do not have the permission "
687 687 u"to create repositories in this group."),
688 688 'permission_denied_root': _(
689 689 u"You do not have the permission to store repositories in "
690 690 u"the root location.")
691 691 }
692 692
693 693 def _to_python(self, value, state):
694 694 # root location
695 695 if value in [-1, "-1"]:
696 696 return None
697 697 return value
698 698
699 699 def validate_python(self, value, state):
700 700 gr = RepoGroup.get(value)
701 701 gr_name = gr.group_name if gr else None # None means ROOT location
702 702 # whether 'create repositories with write permission on group' is enabled
703 703 create_on_write = HasPermissionAny(
704 704 'hg.create.write_on_repogroup.true')()
705 705 group_admin = HasRepoGroupPermissionAny('group.admin')(
706 706 gr_name, 'can write into group validator')
707 707 group_write = HasRepoGroupPermissionAny('group.write')(
708 708 gr_name, 'can write into group validator')
709 709 forbidden = not (group_admin or (group_write and create_on_write))
710 710 can_create_repos = HasPermissionAny(
711 711 'hg.admin', 'hg.create.repository')
712 712 gid = (old_data['repo_group'].get('group_id')
713 713 if (old_data and 'repo_group' in old_data) else None)
714 714 value_changed = gid != safe_int(value)
715 715 new = not old_data
716 716 # only check if we changed the value; there's a case where someone got
717 717 # their write permissions to a repository revoked after they created it; we
718 718 # don't need to check permissions if they didn't change the value of
719 719 # the groups in the form box
720 720 if value_changed or new:
721 721 # parent group need to be existing
722 722 if gr and forbidden:
723 723 msg = M(self, 'permission_denied', state)
724 724 raise formencode.Invalid(
725 725 msg, value, state, error_dict={'repo_type': msg}
726 726 )
727 727 # check if we can write to root location !
728 728 elif gr is None and not can_create_repos():
729 729 msg = M(self, 'permission_denied_root', state)
730 730 raise formencode.Invalid(
731 731 msg, value, state, error_dict={'repo_type': msg}
732 732 )
733 733
734 734 return _validator
735 735
736 736
737 737 def ValidPerms(type_='repo'):
738 738 if type_ == 'repo_group':
739 739 EMPTY_PERM = 'group.none'
740 740 elif type_ == 'repo':
741 741 EMPTY_PERM = 'repository.none'
742 742 elif type_ == 'user_group':
743 743 EMPTY_PERM = 'usergroup.none'
744 744
745 745 class _validator(formencode.validators.FancyValidator):
746 746 messages = {
747 747 'perm_new_member_name':
748 748 _(u'This username or user group name is not valid')
749 749 }
750 750
751 751 def _to_python(self, value, state):
752 752 perm_updates = OrderedSet()
753 753 perm_additions = OrderedSet()
754 754 perm_deletions = OrderedSet()
755 755 # build a list of permission to update/delete and new permission
756 756
757 757 # Read the perm_new_member/perm_del_member attributes and group
758 758 # them by their IDs
759 759 new_perms_group = defaultdict(dict)
760 760 del_perms_group = defaultdict(dict)
761 761 for k, v in value.copy().iteritems():
762 762 if k.startswith('perm_del_member'):
763 763 # delete from the original storage so we don't process it later
764 764 del value[k]
765 765 # part is `id`, `type`
766 766 _type, part = k.split('perm_del_member_')
767 767 args = part.split('_')
768 768 if len(args) == 2:
769 769 _key, pos = args
770 770 del_perms_group[pos][_key] = v
771 771 if k.startswith('perm_new_member'):
772 772 # delete from the original storage so we don't process it later
773 773 del value[k]
774 774 # part is `id`, `type`, `perm`
775 775 _type, part = k.split('perm_new_member_')
776 776 args = part.split('_')
777 777 if len(args) == 2:
778 778 _key, pos = args
779 779 new_perms_group[pos][_key] = v
780 780
781 781 # store the deletes
782 782 for k in sorted(del_perms_group.keys()):
783 783 perm_dict = del_perms_group[k]
784 784 del_member = perm_dict.get('id')
785 785 del_type = perm_dict.get('type')
786 786 if del_member and del_type:
787 787 perm_deletions.add(
788 788 (del_member, None, del_type))
789 789
790 790 # store additions in order of how they were added in web form
791 791 for k in sorted(new_perms_group.keys()):
792 792 perm_dict = new_perms_group[k]
793 793 new_member = perm_dict.get('id')
794 794 new_type = perm_dict.get('type')
795 795 new_perm = perm_dict.get('perm')
796 796 if new_member and new_perm and new_type:
797 797 perm_additions.add(
798 798 (new_member, new_perm, new_type))
799 799
800 800 # get updates of permissions
801 801 # (read the existing radio button states)
802 802 default_user_id = User.get_default_user().user_id
803 803 for k, update_value in value.iteritems():
804 804 if k.startswith('u_perm_') or k.startswith('g_perm_'):
805 805 member = k[7:]
806 806 update_type = {'u': 'user',
807 807 'g': 'users_group'}[k[0]]
808 808
809 809 if safe_int(member) == default_user_id:
810 810 if str2bool(value.get('repo_private')):
811 811 # prevent from updating default user permissions
812 812 # when this repository is marked as private
813 813 update_value = EMPTY_PERM
814 814
815 815 perm_updates.add(
816 816 (member, update_value, update_type))
817 817
818 818 value['perm_additions'] = [] # propagated later
819 819 value['perm_updates'] = list(perm_updates)
820 820 value['perm_deletions'] = list(perm_deletions)
821 821
822 822 updates_map = dict(
823 823 (x[0], (x[1], x[2])) for x in value['perm_updates'])
824 824 # make sure Additions don't override updates.
825 825 for member_id, perm, member_type in list(perm_additions):
826 826 if member_id in updates_map:
827 827 perm = updates_map[member_id][0]
828 828 value['perm_additions'].append((member_id, perm, member_type))
829 829
830 830 # on new entries validate users they exist and they are active !
831 831 # this leaves feedback to the form
832 832 try:
833 833 if member_type == 'user':
834 834 User.query()\
835 835 .filter(User.active == true())\
836 836 .filter(User.user_id == member_id).one()
837 837 if member_type == 'users_group':
838 838 UserGroup.query()\
839 839 .filter(UserGroup.users_group_active == true())\
840 840 .filter(UserGroup.users_group_id == member_id)\
841 841 .one()
842 842
843 843 except Exception:
844 844 log.exception('Updated permission failed: org_exc:')
845 845 msg = M(self, 'perm_new_member_name', state)
846 846 raise formencode.Invalid(
847 847 msg, value, state, error_dict={
848 848 'perm_new_member_name': msg}
849 849 )
850 850 return value
851 851 return _validator
852 852
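# Illustrative sketch (not part of this changeset): the kind of web-form keys
# ValidPerms()._to_python() above parses; the ids and permission names are
# made up.
#
#   value = {
#       'perm_new_member_id_1': '5',
#       'perm_new_member_type_1': 'user',
#       'perm_new_member_perm_1': 'repository.read',
#       'perm_del_member_id_2': '8',
#       'perm_del_member_type_2': 'users_group',
#       'u_perm_3': 'repository.write',
#   }
#
# is roughly turned into:
#
#   perm_additions -> [('5', 'repository.read', 'user')]
#   perm_deletions -> [('8', None, 'users_group')]
#   perm_updates   -> [('3', 'repository.write', 'user')]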
853 853
854 854 def ValidSettings():
855 855 class _validator(formencode.validators.FancyValidator):
856 856 def _to_python(self, value, state):
857 857 # users that are not admin can't edit certain parameters in the
858 858 # settings form; this is an extra safeguard in case they tamper
859 859 # with the forms
860 860
861 861 forbidden_params = [
862 862 'user', 'repo_type', 'repo_enable_locking',
863 863 'repo_enable_downloads', 'repo_enable_statistics'
864 864 ]
865 865
866 866 for param in forbidden_params:
867 867 if param in value:
868 868 del value[param]
869 869 return value
870 870
871 871 def validate_python(self, value, state):
872 872 pass
873 873 return _validator
874 874
875 875
876 876 def ValidPath():
877 877 class _validator(formencode.validators.FancyValidator):
878 878 messages = {
879 879 'invalid_path': _(u'This is not a valid path')
880 880 }
881 881
882 882 def validate_python(self, value, state):
883 883 if not os.path.isdir(value):
884 884 msg = M(self, 'invalid_path', state)
885 885 raise formencode.Invalid(
886 886 msg, value, state, error_dict={'paths_root_path': msg}
887 887 )
888 888 return _validator
889 889
890 890
891 891 def UniqSystemEmail(old_data={}):
892 892 class _validator(formencode.validators.FancyValidator):
893 893 messages = {
894 894 'email_taken': _(u'This e-mail address is already taken')
895 895 }
896 896
897 897 def _to_python(self, value, state):
898 898 return value.lower()
899 899
900 900 def validate_python(self, value, state):
901 901 if (old_data.get('email') or '').lower() != value:
902 902 user = User.get_by_email(value, case_insensitive=True)
903 903 if user:
904 904 msg = M(self, 'email_taken', state)
905 905 raise formencode.Invalid(
906 906 msg, value, state, error_dict={'email': msg}
907 907 )
908 908 return _validator
909 909
910 910
911 911 def ValidSystemEmail():
912 912 class _validator(formencode.validators.FancyValidator):
913 913 messages = {
914 914 'non_existing_email': _(u'e-mail "%(email)s" does not exist.')
915 915 }
916 916
917 917 def _to_python(self, value, state):
918 918 return value.lower()
919 919
920 920 def validate_python(self, value, state):
921 921 user = User.get_by_email(value, case_insensitive=True)
922 922 if user is None:
923 923 msg = M(self, 'non_existing_email', state, email=value)
924 924 raise formencode.Invalid(
925 925 msg, value, state, error_dict={'email': msg}
926 926 )
927 927
928 928 return _validator
929 929
930 930
931 931 def NotReviewedRevisions(repo_id):
932 932 class _validator(formencode.validators.FancyValidator):
933 933 messages = {
934 934 'rev_already_reviewed':
935 935 _(u'Revisions %(revs)s are already part of pull request '
936 936 u'or have set status'),
937 937 }
938 938
939 939 def validate_python(self, value, state):
940 940 # check that revisions are not already reviewed or part of another
941 941 # pull request
942 942 statuses = ChangesetStatus.query()\
943 943 .filter(ChangesetStatus.revision.in_(value))\
944 944 .filter(ChangesetStatus.repo_id == repo_id)\
945 945 .all()
946 946
947 947 errors = []
948 948 for status in statuses:
949 949 if status.pull_request_id:
950 950 errors.append(['pull_req', status.revision[:12]])
951 951 elif status.status:
952 952 errors.append(['status', status.revision[:12]])
953 953
954 954 if errors:
955 955 revs = ','.join([x[1] for x in errors])
956 956 msg = M(self, 'rev_already_reviewed', state, revs=revs)
957 957 raise formencode.Invalid(
958 958 msg, value, state, error_dict={'revisions': revs})
959 959
960 960 return _validator
961 961
962 962
963 963 def ValidIp():
964 964 class _validator(CIDR):
965 965 messages = {
966 966 'badFormat': _(u'Please enter a valid IPv4 or IPv6 address'),
967 967 'illegalBits': _(
968 968 u'The network size (bits) must be within the range '
969 969 u'of 0-32 (not %(bits)r)'),
970 970 }
971 971
972 972 # we override the default to_python() call
973 973 def to_python(self, value, state):
974 974 v = super(_validator, self).to_python(value, state)
975 v = v.strip()
975 v = safe_unicode(v.strip())
976 976 net = ipaddress.ip_network(address=v, strict=False)
977 977 return str(net)
978 978
979 979 def validate_python(self, value, state):
980 980 try:
981 addr = value.strip()
981 addr = safe_unicode(value.strip())
982 982 # this raises a ValueError if the address is not IPv4 or IPv6
983 983 ipaddress.ip_network(addr, strict=False)
984 984 except ValueError:
985 985 raise formencode.Invalid(self.message('badFormat', state),
986 986 value, state)
987 987
988 988 return _validator
989 989
990 990
991 991 def FieldKey():
992 992 class _validator(formencode.validators.FancyValidator):
993 993 messages = {
994 994 'badFormat': _(
995 995 u'Key name can only consist of letters, '
996 996 u'underscore, dash or numbers'),
997 997 }
998 998
999 999 def validate_python(self, value, state):
1000 1000 if not re.match('[a-zA-Z0-9_-]+$', value):
1001 1001 raise formencode.Invalid(self.message('badFormat', state),
1002 1002 value, state)
1003 1003 return _validator
1004 1004
1005 1005
1006 1006 def ValidAuthPlugins():
1007 1007 class _validator(formencode.validators.FancyValidator):
1008 1008 messages = {
1009 1009 'import_duplicate': _(
1010 1010 u'Plugins %(loaded)s and %(next_to_load)s '
1011 1011 u'both export the same name'),
1012 1012 'missing_includeme': _(
1013 1013 u'The plugin "%(plugin_id)s" is missing an includeme '
1014 1014 u'function.'),
1015 1015 'import_error': _(
1016 1016 u'Can not load plugin "%(plugin_id)s"'),
1017 1017 'no_plugin': _(
1018 1018 u'No plugin available with ID "%(plugin_id)s"'),
1019 1019 }
1020 1020
1021 1021 def _to_python(self, value, state):
1022 1022 # filter empty values
1023 1023 return filter(lambda s: s not in [None, ''], value)
1024 1024
1025 1025 def _validate_legacy_plugin_id(self, plugin_id, value, state):
1026 1026 """
1027 1027 Validates that the plugin import works. It also checks that the
1028 1028 plugin has an includeme attribute.
1029 1029 """
1030 1030 try:
1031 1031 plugin = _import_legacy_plugin(plugin_id)
1032 1032 except Exception as e:
1033 1033 log.exception(
1034 1034 'Exception during import of auth legacy plugin "{}"'
1035 1035 .format(plugin_id))
1036 1036 msg = M(self, 'import_error', plugin_id=plugin_id)
1037 1037 raise formencode.Invalid(msg, value, state)
1038 1038
1039 1039 if not hasattr(plugin, 'includeme'):
1040 1040 msg = M(self, 'missing_includeme', plugin_id=plugin_id)
1041 1041 raise formencode.Invalid(msg, value, state)
1042 1042
1043 1043 return plugin
1044 1044
1045 1045 def _validate_plugin_id(self, plugin_id, value, state):
1046 1046 """
1047 1047 Plugins are already imported during application start-up. This
1048 1048 validation therefore only looks the plugin up in the plugin
1049 1049 registry; any non-None result means the plugin is available.
1050 1050 """
1051 1051 plugin = loadplugin(plugin_id)
1052 1052
1053 1053 if plugin is None:
1054 1054 msg = M(self, 'no_plugin', plugin_id=plugin_id)
1055 1055 raise formencode.Invalid(msg, value, state)
1056 1056
1057 1057 return plugin
1058 1058
1059 1059 def validate_python(self, value, state):
1060 1060 unique_names = {}
1061 1061 for plugin_id in value:
1062 1062
1063 1063 # Validate legacy or normal plugin.
1064 1064 if plugin_id.startswith(legacy_plugin_prefix):
1065 1065 plugin = self._validate_legacy_plugin_id(
1066 1066 plugin_id, value, state)
1067 1067 else:
1068 1068 plugin = self._validate_plugin_id(plugin_id, value, state)
1069 1069
1070 1070 # Only allow unique plugin names.
1071 1071 if plugin.name in unique_names:
1072 1072 msg = M(self, 'import_duplicate', state,
1073 1073 loaded=unique_names[plugin.name],
1074 1074 next_to_load=plugin)
1075 1075 raise formencode.Invalid(msg, value, state)
1076 1076 unique_names[plugin.name] = plugin
1077 1077
1078 1078 return _validator
1079 1079
1080 1080
1081 1081 def ValidPattern():
1082 1082
1083 1083 class _Validator(formencode.validators.FancyValidator):
1084 1084
1085 1085 def _to_python(self, value, state):
1086 1086 patterns = []
1087 1087
1088 1088 prefix = 'new_pattern'
1089 1089 for name, v in value.iteritems():
1090 1090 pattern_name = '_'.join((prefix, 'pattern'))
1091 1091 if name.startswith(pattern_name):
1092 1092 new_item_id = name[len(pattern_name)+1:]
1093 1093
1094 1094 def _field(name):
1095 1095 return '%s_%s_%s' % (prefix, name, new_item_id)
1096 1096
1097 1097 values = {
1098 1098 'issuetracker_pat': value.get(_field('pattern')),
1100 1100 'issuetracker_url': value.get(_field('url')),
1101 1101 'issuetracker_pref': value.get(_field('prefix')),
1102 1102 'issuetracker_desc': value.get(_field('description'))
1103 1103 }
1104 1104 new_uid = md5(values['issuetracker_pat'])
1105 1105
1106 1106 has_required_fields = (
1107 1107 values['issuetracker_pat']
1108 1108 and values['issuetracker_url'])
1109 1109
1110 1110 if has_required_fields:
1111 1111 settings = [
1112 1112 ('_'.join((key, new_uid)), values[key], 'unicode')
1113 1113 for key in values]
1114 1114 patterns.append(settings)
1115 1115
1116 1116 value['patterns'] = patterns
1117 1117 delete_patterns = value.get('uid') or []
1118 1118 if not isinstance(delete_patterns, (list, tuple)):
1119 1119 delete_patterns = [delete_patterns]
1120 1120 value['delete_patterns'] = delete_patterns
1121 1121 return value
1122 1122 return _Validator
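All factories in this module share one shape: the function returns a FancyValidator subclass, a form schema instantiates it, and to_python() first runs _to_python() (conversion) and then validate_python() (checks), raising formencode.Invalid on failure. A minimal self-contained sketch of that pattern, assuming only formencode is installed; ValidLowercaseKey is a hypothetical example, not part of this module:

    import formencode

    def ValidLowercaseKey():
        class _validator(formencode.validators.FancyValidator):
            messages = {'badFormat': u'Key must be lowercase'}

            def _to_python(self, value, state):
                # conversion step, like the validators above
                return value.strip()

            def validate_python(self, value, state):
                # validation step; raises formencode.Invalid on failure
                if value != value.lower():
                    raise formencode.Invalid(
                        self.message('badFormat', state), value, state)
        return _validator

    ValidLowercaseKey()().to_python('abc')    # -> 'abc'
    # ValidLowercaseKey()().to_python('Abc')  raises formencode.Invalid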
@@ -1,323 +1,323 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22 import mock
23 23
24 24 from rhodecode.lib.utils2 import safe_unicode
25 25 from rhodecode.model.db import (
26 26 true, User, UserGroup, UserGroupMember, UserEmailMap, Permission, UserIpMap)
27 27 from rhodecode.model.meta import Session
28 28 from rhodecode.model.user import UserModel
29 29 from rhodecode.model.user_group import UserGroupModel
30 30 from rhodecode.model.repo import RepoModel
31 31 from rhodecode.model.repo_group import RepoGroupModel
32 32 from rhodecode.tests.fixture import Fixture
33 33
34 34 fixture = Fixture()
35 35
36 36
37 37 class TestGetUsers(object):
38 38 def test_returns_active_users(self, backend, user_util):
39 39 for i in range(4):
40 40 is_active = i % 2 == 0
41 41 user_util.create_user(active=is_active, lastname='Fake user')
42 42
43 43 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
44 44 users = UserModel().get_users()
45 45 fake_users = [u for u in users if u['last_name'] == 'Fake user']
46 46 assert len(fake_users) == 2
47 47
48 48 expected_keys = (
49 49 'id', 'first_name', 'last_name', 'username', 'icon_link',
50 50 'value_display', 'value', 'value_type')
51 51 for user in users:
52 52 assert user['value_type'] == 'user'
53 53 for key in expected_keys:
54 54 assert key in user
55 55
56 56 def test_returns_user_filtered_by_last_name(self, backend, user_util):
57 57 keywords = ('aBc', u'ünicode')
58 58 for keyword in keywords:
59 59 for i in range(2):
60 60 user_util.create_user(
61 61 active=True, lastname=u'Fake {} user'.format(keyword))
62 62
63 63 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
64 64 keyword = keywords[1].lower()
65 65 users = UserModel().get_users(name_contains=keyword)
66 66
67 67 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
68 68 assert len(fake_users) == 2
69 69 for user in fake_users:
70 70 assert user['last_name'] == safe_unicode('Fake ünicode user')
71 71
72 72 def test_returns_user_filtered_by_first_name(self, backend, user_util):
73 73 created_users = []
74 74 keywords = ('aBc', u'ünicode')
75 75 for keyword in keywords:
76 76 for i in range(2):
77 77 created_users.append(user_util.create_user(
78 78 active=True, lastname='Fake user',
79 79 firstname=u'Fake {} user'.format(keyword)))
80 80
81 81 keyword = keywords[1].lower()
82 82 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
83 83 users = UserModel().get_users(name_contains=keyword)
84 84
85 85 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
86 86 assert len(fake_users) == 2
87 87 for user in fake_users:
88 88 assert user['first_name'] == safe_unicode('Fake ünicode user')
89 89
90 90 def test_returns_user_filtered_by_username(self, backend, user_util):
91 91 created_users = []
92 92 for i in range(5):
93 93 created_users.append(user_util.create_user(
94 94 active=True, lastname='Fake user'))
95 95
96 96 user_filter = created_users[-1].username[-2:]
97 97 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
98 98 users = UserModel().get_users(name_contains=user_filter)
99 99
100 100 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
101 101 assert len(fake_users) == 1
102 102 assert fake_users[0]['username'] == created_users[-1].username
103 103
104 104 def test_returns_limited_user_list(self, backend, user_util):
105 105 created_users = []
106 106 for i in range(5):
107 107 created_users.append(user_util.create_user(
108 108 active=True, lastname='Fake user'))
109 109
110 110 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
111 111 users = UserModel().get_users(name_contains='Fake', limit=3)
112 112
113 113 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
114 114 assert len(fake_users) == 3
115 115
116 116
117 117 @pytest.fixture
118 118 def test_user(request, pylonsapp):
119 119 usr = UserModel().create_or_update(
120 120 username=u'test_user',
121 121 password=u'qweqwe',
122 122 email=u'main_email@rhodecode.org',
123 123 firstname=u'u1', lastname=u'u1')
124 124 Session().commit()
125 125 assert User.get_by_username(u'test_user') == usr
126 126
127 127 @request.addfinalizer
128 128 def cleanup():
129 129 if UserModel().get_user(usr.user_id) is None:
130 130 return
131 131
132 132 perm = Permission.query().all()
133 133 for p in perm:
134 134 UserModel().revoke_perm(usr, p)
135 135
136 136 UserModel().delete(usr.user_id)
137 137 Session().commit()
138 138
139 139 return usr
140 140
141 141
142 142 def test_create_and_remove(test_user):
143 143 usr = test_user
144 144
145 145 # make user group
146 146 user_group = fixture.create_user_group('some_example_group')
147 147 Session().commit()
148 148
149 149 UserGroupModel().add_user_to_group(user_group, usr)
150 150 Session().commit()
151 151
152 152 assert UserGroup.get(user_group.users_group_id) == user_group
153 153 assert UserGroupMember.query().count() == 1
154 154 UserModel().delete(usr.user_id)
155 155 Session().commit()
156 156
157 157 assert UserGroupMember.query().all() == []
158 158
159 159
160 160 def test_additional_email_as_main(test_user):
161 161 with pytest.raises(AttributeError):
162 162 m = UserEmailMap()
163 163 m.email = test_user.email
164 164 m.user = test_user
165 165 Session().add(m)
166 166 Session().commit()
167 167
168 168
169 169 def test_extra_email_map(test_user):
170 170
171 171 m = UserEmailMap()
172 172 m.email = u'main_email2@rhodecode.org'
173 173 m.user = test_user
174 174 Session().add(m)
175 175 Session().commit()
176 176
177 177 u = User.get_by_email(email='main_email@rhodecode.org')
178 178 assert test_user.user_id == u.user_id
179 179 assert test_user.username == u.username
180 180
181 181 u = User.get_by_email(email='main_email2@rhodecode.org')
182 182 assert test_user.user_id == u.user_id
183 183 assert test_user.username == u.username
184 184 u = User.get_by_email(email='main_email3@rhodecode.org')
185 185 assert u is None
186 186
187 187
188 188 def test_get_api_data_replaces_secret_data_by_default(test_user):
189 189 api_data = test_user.get_api_data()
190 190 api_key_length = 40
191 191 expected_replacement = '*' * api_key_length
192 192
193 193 for key in api_data['api_keys']:
194 194 assert key == expected_replacement
195 195
196 196
197 197 def test_get_api_data_includes_secret_data_if_activated(test_user):
198 198 api_data = test_user.get_api_data(include_secrets=True)
199 199 assert api_data['api_keys'] == test_user.auth_tokens
200 200
201 201
202 202 def test_add_perm(test_user):
203 203 perm = Permission.query().all()[0]
204 204 UserModel().grant_perm(test_user, perm)
205 205 Session().commit()
206 206 assert UserModel().has_perm(test_user, perm)
207 207
208 208
209 209 def test_has_perm(test_user):
210 210 perm = Permission.query().all()
211 211 for p in perm:
212 212 assert not UserModel().has_perm(test_user, p)
213 213
214 214
215 215 def test_revoke_perm(test_user):
216 216 perm = Permission.query().all()[0]
217 217 UserModel().grant_perm(test_user, perm)
218 218 Session().commit()
219 219 assert UserModel().has_perm(test_user, perm)
220 220
221 221 # revoke
222 222 UserModel().revoke_perm(test_user, perm)
223 223 Session().commit()
224 224 assert not UserModel().has_perm(test_user, perm)
225 225
226 226
227 227 @pytest.mark.parametrize("ip_range, expected, expect_errors", [
228 228 ('', [], False),
229 229 ('127.0.0.1', ['127.0.0.1'], False),
230 230 ('127.0.0.1,127.0.0.2', ['127.0.0.1', '127.0.0.2'], False),
231 231 ('127.0.0.1 , 127.0.0.2', ['127.0.0.1', '127.0.0.2'], False),
232 232 (
233 233 '127.0.0.1,172.172.172.0,127.0.0.2',
234 234 ['127.0.0.1', '172.172.172.0', '127.0.0.2'], False),
235 235 (
236 236 '127.0.0.1-127.0.0.5',
237 237 ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4', '127.0.0.5'],
238 238 False),
239 239 (
240 240 '127.0.0.1 - 127.0.0.5',
241 241 ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4', '127.0.0.5'],
242 242 False
243 243 ),
244 244 ('-', [], True),
245 245 ('127.0.0.1-32', [], True),
246 246 (
247 247 '127.0.0.1,127.0.0.1,127.0.0.1,127.0.0.1-127.0.0.2,127.0.0.2',
248 248 ['127.0.0.1', '127.0.0.2'], False),
249 249 (
250 250 '127.0.0.1-127.0.0.2,127.0.0.4-127.0.0.6,',
251 251 ['127.0.0.1', '127.0.0.2', '127.0.0.4', '127.0.0.5', '127.0.0.6'],
252 252 False
253 253 ),
254 254 (
255 255 '127.0.0.1-127.0.0.2,127.0.0.1-127.0.0.6,',
256 256 ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4', '127.0.0.5',
257 257 '127.0.0.6'],
258 258 False
259 259 ),
260 260 ])
261 261 def test_ip_range_generator(ip_range, expected, expect_errors):
262 262 func = UserModel().parse_ip_range
263 263 if expect_errors:
264 pytest.raises(Exception, func, ip_range)
264 pytest.raises(ValueError, func, ip_range)
265 265 else:
266 266 parsed_list = func(ip_range)
267 267 assert parsed_list == expected
268 268
269 269
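The parametrized cases above exercise UserModel().parse_ip_range, whose implementation is not part of this diff. A hedged sketch of how such a '<start>-<end>' range could be expanded with the ipaddress module to produce the expected lists; expand_range is illustrative only, not the real helper:

    import ipaddress

    def expand_range(start, end):
        # summarize_address_range yields networks that exactly cover start..end
        first = ipaddress.ip_address(start)
        last = ipaddress.ip_address(end)
        for net in ipaddress.summarize_address_range(first, last):
            for ip in net:
                yield str(ip)

    list(expand_range(u'127.0.0.1', u'127.0.0.5'))
    # -> ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4', '127.0.0.5']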
270 270 def test_user_delete_cascades_ip_whitelist(test_user):
271 271 sample_ip = '1.1.1.1'
272 272 uid_map = UserIpMap(user_id=test_user.user_id, ip_addr=sample_ip)
273 273 Session().add(uid_map)
274 274 Session().delete(test_user)
275 275 try:
276 276 Session().flush()
277 277 finally:
278 278 Session().rollback()
279 279
280 280
281 281 def test_account_for_deactivation_generation(test_user):
282 282 accounts = UserModel().get_accounts_in_creation_order(
283 283 current_user=test_user)
284 284 # current user should be #1 in the list
285 285 assert accounts[0] == test_user.user_id
286 286 active_users = User.query().filter(User.active == true()).count()
287 287 assert active_users == len(accounts)
288 288
289 289
290 290 def test_user_delete_cascades_permissions_on_repo(backend, test_user):
291 291 test_repo = backend.create_repo()
292 292 RepoModel().grant_user_permission(
293 293 test_repo, test_user, 'repository.write')
294 294 Session().commit()
295 295
296 296 assert test_user.repo_to_perm
297 297
298 298 UserModel().delete(test_user)
299 299 Session().commit()
300 300
301 301
302 302 def test_user_delete_cascades_permissions_on_repo_group(
303 303 test_repo_group, test_user):
304 304 RepoGroupModel().grant_user_permission(
305 305 test_repo_group, test_user, 'group.write')
306 306 Session().commit()
307 307
308 308 assert test_user.repo_group_to_perm
309 309
310 310 Session().delete(test_user)
311 311 Session().commit()
312 312
313 313
314 314 def test_user_delete_cascades_permissions_on_user_group(
315 315 test_user_group, test_user):
316 316 UserGroupModel().grant_user_permission(
317 317 test_user_group, test_user, 'usergroup.write')
318 318 Session().commit()
319 319
320 320 assert test_user.user_group_to_perm
321 321
322 322 Session().delete(test_user)
323 323 Session().commit()
@@ -1,257 +1,256 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 # Import early to make sure things are patched up properly
22 22 from setuptools import setup, find_packages
23 23
24 24 import os
25 25 import sys
26 26 import pkgutil
27 27 import platform
28 28
29 29 from pip.download import PipSession
30 30 from pip.req import parse_requirements
31 31
32 32 from codecs import open
33 33
34 34
35 35 if sys.version_info < (2, 7):
36 36 raise Exception('RhodeCode requires Python 2.7 or later')
37 37
38 38 here = os.path.abspath(os.path.dirname(__file__))
39 39
40 40 # defines current platform
41 41 __platform__ = platform.system()
42 42 __license__ = 'AGPLv3, and Commercial License'
43 43 __author__ = 'RhodeCode GmbH'
44 44 __url__ = 'https://code.rhodecode.com'
45 45 is_windows = __platform__ in ('Windows',)
46 46
47 47
48 48 def _get_requirements(req_filename, exclude=None, extras=None):
49 49 extras = extras or []
50 50 exclude = exclude or []
51 51
52 52 try:
53 53 parsed = parse_requirements(
54 54 os.path.join(here, req_filename), session=PipSession())
55 55 except TypeError:
56 56 # fall back for pip < 6.0.0, which doesn't support the session argument
57 57 parsed = parse_requirements(os.path.join(here, req_filename))
58 58
59 59 requirements = []
60 60 for ir in parsed:
61 61 if ir.req and ir.name not in exclude:
62 62 requirements.append(str(ir.req))
63 63 return requirements + extras
64 64
65 65
66 66 # requirements extract
67 67 setup_requirements = ['PasteScript', 'pytest-runner']
68 68 install_requirements = _get_requirements(
69 69 'requirements.txt', exclude=['setuptools'])
70 70 test_requirements = _get_requirements(
71 71 'requirements_test.txt', extras=['configobj'])
72 72
73 73 install_requirements = [
74 74 'Babel',
75 75 'Beaker',
76 76 'FormEncode',
77 77 'Mako',
78 78 'Markdown',
79 79 'MarkupSafe',
80 80 'MySQL-python',
81 81 'Paste',
82 82 'PasteDeploy',
83 83 'PasteScript',
84 84 'Pygments',
85 85 'pygments-markdown-lexer',
86 86 'Pylons',
87 87 'Routes',
88 88 'SQLAlchemy',
89 89 'Tempita',
90 90 'URLObject',
91 91 'WebError',
92 92 'WebHelpers',
93 93 'WebHelpers2',
94 94 'WebOb',
95 95 'WebTest',
96 96 'Whoosh',
97 97 'alembic',
98 98 'amqplib',
99 99 'anyjson',
100 100 'appenlight-client',
101 101 'authomatic',
102 'backport_ipaddress',
103 102 'cssselect',
104 103 'celery',
105 104 'channelstream',
106 105 'colander',
107 106 'decorator',
108 107 'deform',
109 108 'docutils',
110 109 'gevent',
111 110 'gunicorn',
112 111 'infrae.cache',
113 112 'ipython',
114 113 'iso8601',
115 114 'kombu',
116 115 'lxml',
117 116 'msgpack-python',
118 117 'nbconvert',
119 118 'packaging',
120 119 'psycopg2',
121 120 'py-gfm',
122 121 'pycrypto',
123 122 'pycurl',
124 123 'pyparsing',
125 124 'pyramid',
126 125 'pyramid-debugtoolbar',
127 126 'pyramid-mako',
128 127 'pyramid-beaker',
129 128 'pysqlite',
130 129 'python-dateutil',
131 130 'python-ldap',
132 131 'python-memcached',
133 132 'python-pam',
134 133 'recaptcha-client',
135 134 'repoze.lru',
136 135 'requests',
137 136 'simplejson',
138 137 'subprocess32',
139 138 'waitress',
140 139 'zope.cachedescriptors',
141 140 'dogpile.cache',
142 141 'dogpile.core',
143 142 'psutil',
144 143 'py-bcrypt',
145 144 ]
146 145
147 146
148 147 def get_version():
149 148 version = pkgutil.get_data('rhodecode', 'VERSION')
150 149 return version.strip()
151 150
152 151
153 152 # additional files that go into the package itself
154 153 package_data = {
155 154 '': ['*.txt', '*.rst'],
156 155 'configs': ['*.ini'],
157 156 'rhodecode': ['VERSION', 'i18n/*/LC_MESSAGES/*.mo', ],
158 157 }
159 158
160 159 description = 'Source Code Management Platform'
161 160 keywords = ' '.join([
162 161 'rhodecode', 'mercurial', 'git', 'svn',
163 162 'code review',
164 163 'repo groups', 'ldap', 'repository management', 'hgweb',
165 164 'hgwebdir', 'gitweb', 'serving hgweb',
166 165 ])
167 166
168 167
169 168 # README/DESCRIPTION generation
170 169 readme_file = 'README.rst'
171 170 changelog_file = 'CHANGES.rst'
172 171 try:
173 172 long_description = open(readme_file).read() + '\n\n' + \
174 173 open(changelog_file).read()
175 174 except IOError as err:
176 175 sys.stderr.write(
177 176 "[WARNING] Cannot find file specified as long_description (%s)\n "
178 177 "or changelog (%s); skipping that file" % (readme_file, changelog_file))
179 178 long_description = description
180 179
181 180
182 181 setup(
183 182 name='rhodecode-enterprise-ce',
184 183 version=get_version(),
185 184 description=description,
186 185 long_description=long_description,
187 186 keywords=keywords,
188 187 license=__license__,
189 188 author=__author__,
190 189 author_email='marcin@rhodecode.com',
191 190 url=__url__,
192 191 setup_requires=setup_requirements,
193 192 install_requires=install_requirements,
194 193 tests_require=test_requirements,
195 194 zip_safe=False,
196 195 packages=find_packages(exclude=["docs", "tests*"]),
197 196 package_data=package_data,
198 197 include_package_data=True,
199 198 classifiers=[
200 199 'Development Status :: 6 - Mature',
201 200 'Environment :: Web Environment',
202 201 'Intended Audience :: Developers',
203 202 'Operating System :: OS Independent',
204 203 'Topic :: Software Development :: Version Control',
205 204 'License :: OSI Approved :: Affero GNU General Public License v3 or later (AGPLv3+)',
206 205 'Programming Language :: Python :: 2.7',
207 206 ],
208 207 message_extractors={
209 208 'rhodecode': [
210 209 ('**.py', 'python', None),
211 210 ('**.js', 'javascript', None),
212 211 ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
213 212 ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}),
214 213 ('public/**', 'ignore', None),
215 214 ]
216 215 },
217 216 paster_plugins=['PasteScript', 'Pylons'],
218 217 entry_points={
219 218 'enterprise.plugins1': [
220 219 'crowd=rhodecode.authentication.plugins.auth_crowd:plugin_factory',
221 220 'headers=rhodecode.authentication.plugins.auth_headers:plugin_factory',
222 221 'jasig_cas=rhodecode.authentication.plugins.auth_jasig_cas:plugin_factory',
223 222 'ldap=rhodecode.authentication.plugins.auth_ldap:plugin_factory',
224 223 'pam=rhodecode.authentication.plugins.auth_pam:plugin_factory',
225 224 'rhodecode=rhodecode.authentication.plugins.auth_rhodecode:plugin_factory',
226 225 'token=rhodecode.authentication.plugins.auth_token:plugin_factory',
227 226 ],
228 227 'paste.app_factory': [
229 228 'main=rhodecode.config.middleware:make_pyramid_app',
230 229 'pylons=rhodecode.config.middleware:make_app',
231 230 ],
232 231 'paste.app_install': [
233 232 'main=pylons.util:PylonsInstaller',
234 233 'pylons=pylons.util:PylonsInstaller',
235 234 ],
236 235 'paste.global_paster_command': [
237 236 'make-config=rhodecode.lib.paster_commands.make_config:Command',
238 237 'setup-rhodecode=rhodecode.lib.paster_commands.setup_rhodecode:Command',
239 238 'update-repoinfo=rhodecode.lib.paster_commands.update_repoinfo:Command',
240 239 'cache-keys=rhodecode.lib.paster_commands.cache_keys:Command',
241 240 'ishell=rhodecode.lib.paster_commands.ishell:Command',
242 241 'upgrade-db=rhodecode.lib.dbmigrate:UpgradeDb',
243 242 'celeryd=rhodecode.lib.celerypylons.commands:CeleryDaemonCommand',
244 243 ],
245 244 'pytest11': [
246 245 'pylons=rhodecode.tests.pylons_plugin',
247 246 'enterprise=rhodecode.tests.plugin',
248 247 ],
249 248 'console_scripts': [
250 249 'rcserver=rhodecode.rcserver:main',
251 250 ],
252 251 'beaker.backends': [
253 252 'memorylru_base=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerBase',
254 253 'memorylru_debug=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerDebug'
255 254 ]
256 255 },
257 256 )