##// END OF EJS Templates
dependencies: bumped pyramid to 1.9 webob to 1.7.3 and webtest to 2.0.27...
marcink -
r1906:1eaf71e3 default
parent child Browse files
Show More
@@ -1,2034 +1,2086 b''
1 1 # Generated by pip2nix 0.4.0
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 {
5 5 Babel = super.buildPythonPackage {
6 6 name = "Babel-1.3";
7 7 buildInputs = with self; [];
8 8 doCheck = false;
9 9 propagatedBuildInputs = with self; [pytz];
10 10 src = fetchurl {
11 11 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
12 12 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 16 };
17 17 };
18 18 Beaker = super.buildPythonPackage {
19 19 name = "Beaker-1.9.0";
20 20 buildInputs = with self; [];
21 21 doCheck = false;
22 22 propagatedBuildInputs = with self; [funcsigs];
23 23 src = fetchurl {
24 24 url = "https://pypi.python.org/packages/93/b2/12de6937b06e9615dbb3cb3a1c9af17f133f435bdef59f4ad42032b6eb49/Beaker-1.9.0.tar.gz";
25 25 md5 = "38b3fcdfa24faf97c6cf66991eb54e9c";
26 26 };
27 27 meta = {
28 28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 29 };
30 30 };
31 31 CProfileV = super.buildPythonPackage {
32 32 name = "CProfileV-1.0.7";
33 33 buildInputs = with self; [];
34 34 doCheck = false;
35 35 propagatedBuildInputs = with self; [bottle];
36 36 src = fetchurl {
37 37 url = "https://pypi.python.org/packages/df/50/d8c1ada7d537c64b0f76453fa31dedb6af6e27b82fcf0331e5f71a4cf98b/CProfileV-1.0.7.tar.gz";
38 38 md5 = "db4c7640438aa3d8887e194c81c7a019";
39 39 };
40 40 meta = {
41 41 license = [ pkgs.lib.licenses.mit ];
42 42 };
43 43 };
44 44 Chameleon = super.buildPythonPackage {
45 45 name = "Chameleon-2.24";
46 46 buildInputs = with self; [];
47 47 doCheck = false;
48 48 propagatedBuildInputs = with self; [];
49 49 src = fetchurl {
50 50 url = "https://pypi.python.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
51 51 md5 = "1b01f1f6533a8a11d0d2f2366dec5342";
52 52 };
53 53 meta = {
54 54 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
55 55 };
56 56 };
57 57 FormEncode = super.buildPythonPackage {
58 58 name = "FormEncode-1.2.4";
59 59 buildInputs = with self; [];
60 60 doCheck = false;
61 61 propagatedBuildInputs = with self; [];
62 62 src = fetchurl {
63 63 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
64 64 md5 = "6bc17fb9aed8aea198975e888e2077f4";
65 65 };
66 66 meta = {
67 67 license = [ pkgs.lib.licenses.psfl ];
68 68 };
69 69 };
70 70 Jinja2 = super.buildPythonPackage {
71 71 name = "Jinja2-2.7.3";
72 72 buildInputs = with self; [];
73 73 doCheck = false;
74 74 propagatedBuildInputs = with self; [MarkupSafe];
75 75 src = fetchurl {
76 76 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
77 77 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
78 78 };
79 79 meta = {
80 80 license = [ pkgs.lib.licenses.bsdOriginal ];
81 81 };
82 82 };
83 83 Mako = super.buildPythonPackage {
84 84 name = "Mako-1.0.6";
85 85 buildInputs = with self; [];
86 86 doCheck = false;
87 87 propagatedBuildInputs = with self; [MarkupSafe];
88 88 src = fetchurl {
89 89 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
90 90 md5 = "a28e22a339080316b2acc352b9ee631c";
91 91 };
92 92 meta = {
93 93 license = [ pkgs.lib.licenses.mit ];
94 94 };
95 95 };
96 96 Markdown = super.buildPythonPackage {
97 97 name = "Markdown-2.6.8";
98 98 buildInputs = with self; [];
99 99 doCheck = false;
100 100 propagatedBuildInputs = with self; [];
101 101 src = fetchurl {
102 102 url = "https://pypi.python.org/packages/1d/25/3f6d2cb31ec42ca5bd3bfbea99b63892b735d76e26f20dd2dcc34ffe4f0d/Markdown-2.6.8.tar.gz";
103 103 md5 = "d9ef057a5bd185f6f536400a31fc5d45";
104 104 };
105 105 meta = {
106 106 license = [ pkgs.lib.licenses.bsdOriginal ];
107 107 };
108 108 };
109 109 MarkupSafe = super.buildPythonPackage {
110 110 name = "MarkupSafe-0.23";
111 111 buildInputs = with self; [];
112 112 doCheck = false;
113 113 propagatedBuildInputs = with self; [];
114 114 src = fetchurl {
115 115 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
116 116 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
117 117 };
118 118 meta = {
119 119 license = [ pkgs.lib.licenses.bsdOriginal ];
120 120 };
121 121 };
122 122 MySQL-python = super.buildPythonPackage {
123 123 name = "MySQL-python-1.2.5";
124 124 buildInputs = with self; [];
125 125 doCheck = false;
126 126 propagatedBuildInputs = with self; [];
127 127 src = fetchurl {
128 128 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
129 129 md5 = "654f75b302db6ed8dc5a898c625e030c";
130 130 };
131 131 meta = {
132 132 license = [ pkgs.lib.licenses.gpl1 ];
133 133 };
134 134 };
135 135 Paste = super.buildPythonPackage {
136 136 name = "Paste-2.0.3";
137 137 buildInputs = with self; [];
138 138 doCheck = false;
139 139 propagatedBuildInputs = with self; [six];
140 140 src = fetchurl {
141 141 url = "https://pypi.python.org/packages/30/c3/5c2f7c7a02e4f58d4454353fa1c32c94f79fa4e36d07a67c0ac295ea369e/Paste-2.0.3.tar.gz";
142 142 md5 = "1231e14eae62fa7ed76e9130b04bc61e";
143 143 };
144 144 meta = {
145 145 license = [ pkgs.lib.licenses.mit ];
146 146 };
147 147 };
148 148 PasteDeploy = super.buildPythonPackage {
149 149 name = "PasteDeploy-1.5.2";
150 150 buildInputs = with self; [];
151 151 doCheck = false;
152 152 propagatedBuildInputs = with self; [];
153 153 src = fetchurl {
154 154 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
155 155 md5 = "352b7205c78c8de4987578d19431af3b";
156 156 };
157 157 meta = {
158 158 license = [ pkgs.lib.licenses.mit ];
159 159 };
160 160 };
161 161 PasteScript = super.buildPythonPackage {
162 162 name = "PasteScript-1.7.5";
163 163 buildInputs = with self; [];
164 164 doCheck = false;
165 165 propagatedBuildInputs = with self; [Paste PasteDeploy];
166 166 src = fetchurl {
167 167 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
168 168 md5 = "4c72d78dcb6bb993f30536842c16af4d";
169 169 };
170 170 meta = {
171 171 license = [ pkgs.lib.licenses.mit ];
172 172 };
173 173 };
174 174 Pygments = super.buildPythonPackage {
175 175 name = "Pygments-2.2.0";
176 176 buildInputs = with self; [];
177 177 doCheck = false;
178 178 propagatedBuildInputs = with self; [];
179 179 src = fetchurl {
180 180 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
181 181 md5 = "13037baca42f16917cbd5ad2fab50844";
182 182 };
183 183 meta = {
184 184 license = [ pkgs.lib.licenses.bsdOriginal ];
185 185 };
186 186 };
187 187 Pylons = super.buildPythonPackage {
188 188 name = "Pylons-1.0.2.dev20170630";
189 189 buildInputs = with self; [];
190 190 doCheck = false;
191 191 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
192 192 src = fetchurl {
193 193 url = "https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f";
194 194 md5 = "f26633726fa2cd3a340316ee6a5d218f";
195 195 };
196 196 meta = {
197 197 license = [ pkgs.lib.licenses.bsdOriginal ];
198 198 };
199 199 };
200 200 Routes = super.buildPythonPackage {
201 201 name = "Routes-1.13";
202 202 buildInputs = with self; [];
203 203 doCheck = false;
204 204 propagatedBuildInputs = with self; [repoze.lru];
205 205 src = fetchurl {
206 206 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
207 207 md5 = "d527b0ab7dd9172b1275a41f97448783";
208 208 };
209 209 meta = {
210 210 license = [ pkgs.lib.licenses.bsdOriginal ];
211 211 };
212 212 };
213 213 SQLAlchemy = super.buildPythonPackage {
214 214 name = "SQLAlchemy-0.9.9";
215 215 buildInputs = with self; [];
216 216 doCheck = false;
217 217 propagatedBuildInputs = with self; [];
218 218 src = fetchurl {
219 219 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
220 220 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
221 221 };
222 222 meta = {
223 223 license = [ pkgs.lib.licenses.mit ];
224 224 };
225 225 };
226 226 Sphinx = super.buildPythonPackage {
227 227 name = "Sphinx-1.2.2";
228 228 buildInputs = with self; [];
229 229 doCheck = false;
230 230 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
231 231 src = fetchurl {
232 232 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
233 233 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
234 234 };
235 235 meta = {
236 236 license = [ pkgs.lib.licenses.bsdOriginal ];
237 237 };
238 238 };
239 239 Tempita = super.buildPythonPackage {
240 240 name = "Tempita-0.5.2";
241 241 buildInputs = with self; [];
242 242 doCheck = false;
243 243 propagatedBuildInputs = with self; [];
244 244 src = fetchurl {
245 245 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
246 246 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
247 247 };
248 248 meta = {
249 249 license = [ pkgs.lib.licenses.mit ];
250 250 };
251 251 };
252 252 URLObject = super.buildPythonPackage {
253 253 name = "URLObject-2.4.0";
254 254 buildInputs = with self; [];
255 255 doCheck = false;
256 256 propagatedBuildInputs = with self; [];
257 257 src = fetchurl {
258 258 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
259 259 md5 = "2ed819738a9f0a3051f31dc9924e3065";
260 260 };
261 261 meta = {
262 262 license = [ ];
263 263 };
264 264 };
265 265 WebError = super.buildPythonPackage {
266 266 name = "WebError-0.10.3";
267 267 buildInputs = with self; [];
268 268 doCheck = false;
269 269 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
270 270 src = fetchurl {
271 271 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
272 272 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
273 273 };
274 274 meta = {
275 275 license = [ pkgs.lib.licenses.mit ];
276 276 };
277 277 };
278 278 WebHelpers = super.buildPythonPackage {
279 279 name = "WebHelpers-1.3";
280 280 buildInputs = with self; [];
281 281 doCheck = false;
282 282 propagatedBuildInputs = with self; [MarkupSafe];
283 283 src = fetchurl {
284 284 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
285 285 md5 = "32749ffadfc40fea51075a7def32588b";
286 286 };
287 287 meta = {
288 288 license = [ pkgs.lib.licenses.bsdOriginal ];
289 289 };
290 290 };
291 291 WebHelpers2 = super.buildPythonPackage {
292 292 name = "WebHelpers2-2.0";
293 293 buildInputs = with self; [];
294 294 doCheck = false;
295 295 propagatedBuildInputs = with self; [MarkupSafe six];
296 296 src = fetchurl {
297 297 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
298 298 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
299 299 };
300 300 meta = {
301 301 license = [ pkgs.lib.licenses.mit ];
302 302 };
303 303 };
304 304 WebOb = super.buildPythonPackage {
305 name = "WebOb-1.3.1";
305 name = "WebOb-1.7.3";
306 306 buildInputs = with self; [];
307 307 doCheck = false;
308 308 propagatedBuildInputs = with self; [];
309 309 src = fetchurl {
310 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
311 md5 = "20918251c5726956ba8fef22d1556177";
310 url = "https://pypi.python.org/packages/46/87/2f96d8d43b2078fae6e1d33fa86b95c228cebed060f4e3c7576cc44ea83b/WebOb-1.7.3.tar.gz";
311 md5 = "350028baffc508e3d23c078118e35316";
312 312 };
313 313 meta = {
314 314 license = [ pkgs.lib.licenses.mit ];
315 315 };
316 316 };
317 317 WebTest = super.buildPythonPackage {
318 name = "WebTest-1.4.3";
318 name = "WebTest-2.0.27";
319 319 buildInputs = with self; [];
320 320 doCheck = false;
321 propagatedBuildInputs = with self; [WebOb];
321 propagatedBuildInputs = with self; [six WebOb waitress beautifulsoup4];
322 322 src = fetchurl {
323 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
324 md5 = "631ce728bed92c681a4020a36adbc353";
323 url = "https://pypi.python.org/packages/80/fa/ca3a759985c72e3a124cbca3e1f8a2e931a07ffd31fd45d8f7bf21cb95cf/WebTest-2.0.27.tar.gz";
324 md5 = "54e6515ac71c51b6fc90179483c749ad";
325 325 };
326 326 meta = {
327 327 license = [ pkgs.lib.licenses.mit ];
328 328 };
329 329 };
330 330 Whoosh = super.buildPythonPackage {
331 331 name = "Whoosh-2.7.4";
332 332 buildInputs = with self; [];
333 333 doCheck = false;
334 334 propagatedBuildInputs = with self; [];
335 335 src = fetchurl {
336 336 url = "https://pypi.python.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
337 337 md5 = "c2710105f20b3e29936bd2357383c325";
338 338 };
339 339 meta = {
340 340 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
341 341 };
342 342 };
343 343 alembic = super.buildPythonPackage {
344 344 name = "alembic-0.9.2";
345 345 buildInputs = with self; [];
346 346 doCheck = false;
347 347 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor python-dateutil];
348 348 src = fetchurl {
349 349 url = "https://pypi.python.org/packages/78/48/b5b26e7218b415f40b60b92c53853d242e5456c0f19f6c66101d98ff5f2a/alembic-0.9.2.tar.gz";
350 350 md5 = "40daf8bae50969beea40efaaf0839ff4";
351 351 };
352 352 meta = {
353 353 license = [ pkgs.lib.licenses.mit ];
354 354 };
355 355 };
356 356 amqplib = super.buildPythonPackage {
357 357 name = "amqplib-1.0.2";
358 358 buildInputs = with self; [];
359 359 doCheck = false;
360 360 propagatedBuildInputs = with self; [];
361 361 src = fetchurl {
362 362 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
363 363 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
364 364 };
365 365 meta = {
366 366 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
367 367 };
368 368 };
369 369 anyjson = super.buildPythonPackage {
370 370 name = "anyjson-0.3.3";
371 371 buildInputs = with self; [];
372 372 doCheck = false;
373 373 propagatedBuildInputs = with self; [];
374 374 src = fetchurl {
375 375 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
376 376 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
377 377 };
378 378 meta = {
379 379 license = [ pkgs.lib.licenses.bsdOriginal ];
380 380 };
381 381 };
382 382 appenlight-client = super.buildPythonPackage {
383 383 name = "appenlight-client-0.6.21";
384 384 buildInputs = with self; [];
385 385 doCheck = false;
386 386 propagatedBuildInputs = with self; [WebOb requests six];
387 387 src = fetchurl {
388 388 url = "https://pypi.python.org/packages/c9/23/91b66cfa0b963662c10b2a06ccaadf3f3a4848a7a2aa16255cb43d5160ec/appenlight_client-0.6.21.tar.gz";
389 389 md5 = "273999ac854fdaefa8d0fb61965a4ed9";
390 390 };
391 391 meta = {
392 392 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
393 393 };
394 394 };
395 395 authomatic = super.buildPythonPackage {
396 396 name = "authomatic-0.1.0.post1";
397 397 buildInputs = with self; [];
398 398 doCheck = false;
399 399 propagatedBuildInputs = with self; [];
400 400 src = fetchurl {
401 401 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
402 402 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
403 403 };
404 404 meta = {
405 405 license = [ pkgs.lib.licenses.mit ];
406 406 };
407 407 };
408 408 backport-ipaddress = super.buildPythonPackage {
409 409 name = "backport-ipaddress-0.1";
410 410 buildInputs = with self; [];
411 411 doCheck = false;
412 412 propagatedBuildInputs = with self; [];
413 413 src = fetchurl {
414 414 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
415 415 md5 = "9c1f45f4361f71b124d7293a60006c05";
416 416 };
417 417 meta = {
418 418 license = [ pkgs.lib.licenses.psfl ];
419 419 };
420 420 };
421 421 backports.shutil-get-terminal-size = super.buildPythonPackage {
422 422 name = "backports.shutil-get-terminal-size-1.0.0";
423 423 buildInputs = with self; [];
424 424 doCheck = false;
425 425 propagatedBuildInputs = with self; [];
426 426 src = fetchurl {
427 427 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
428 428 md5 = "03267762480bd86b50580dc19dff3c66";
429 429 };
430 430 meta = {
431 431 license = [ pkgs.lib.licenses.mit ];
432 432 };
433 433 };
434 beautifulsoup4 = super.buildPythonPackage {
435 name = "beautifulsoup4-4.6.0";
436 buildInputs = with self; [];
437 doCheck = false;
438 propagatedBuildInputs = with self; [];
439 src = fetchurl {
440 url = "https://pypi.python.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
441 md5 = "c17714d0f91a23b708a592cb3c697728";
442 };
443 meta = {
444 license = [ pkgs.lib.licenses.mit ];
445 };
446 };
434 447 bleach = super.buildPythonPackage {
435 448 name = "bleach-1.5.0";
436 449 buildInputs = with self; [];
437 450 doCheck = false;
438 451 propagatedBuildInputs = with self; [six html5lib];
439 452 src = fetchurl {
440 453 url = "https://pypi.python.org/packages/99/00/25a8fce4de102bf6e3cc76bc4ea60685b2fee33bde1b34830c70cacc26a7/bleach-1.5.0.tar.gz";
441 454 md5 = "b663300efdf421b3b727b19d7be9c7e7";
442 455 };
443 456 meta = {
444 457 license = [ pkgs.lib.licenses.asl20 ];
445 458 };
446 459 };
447 460 bottle = super.buildPythonPackage {
448 461 name = "bottle-0.12.8";
449 462 buildInputs = with self; [];
450 463 doCheck = false;
451 464 propagatedBuildInputs = with self; [];
452 465 src = fetchurl {
453 466 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
454 467 md5 = "13132c0a8f607bf860810a6ee9064c5b";
455 468 };
456 469 meta = {
457 470 license = [ pkgs.lib.licenses.mit ];
458 471 };
459 472 };
460 473 bumpversion = super.buildPythonPackage {
461 474 name = "bumpversion-0.5.3";
462 475 buildInputs = with self; [];
463 476 doCheck = false;
464 477 propagatedBuildInputs = with self; [];
465 478 src = fetchurl {
466 479 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
467 480 md5 = "c66a3492eafcf5ad4b024be9fca29820";
468 481 };
469 482 meta = {
470 483 license = [ pkgs.lib.licenses.mit ];
471 484 };
472 485 };
473 486 celery = super.buildPythonPackage {
474 487 name = "celery-2.2.10";
475 488 buildInputs = with self; [];
476 489 doCheck = false;
477 490 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
478 491 src = fetchurl {
479 492 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
480 493 md5 = "898bc87e54f278055b561316ba73e222";
481 494 };
482 495 meta = {
483 496 license = [ pkgs.lib.licenses.bsdOriginal ];
484 497 };
485 498 };
486 499 channelstream = super.buildPythonPackage {
487 500 name = "channelstream-0.5.2";
488 501 buildInputs = with self; [];
489 502 doCheck = false;
490 503 propagatedBuildInputs = with self; [gevent ws4py pyramid pyramid-jinja2 itsdangerous requests six];
491 504 src = fetchurl {
492 505 url = "https://pypi.python.org/packages/2b/31/29a8e085cf5bf97fa88e7b947adabfc581a18a3463adf77fb6dada34a65f/channelstream-0.5.2.tar.gz";
493 506 md5 = "1c5eb2a8a405be6f1073da94da6d81d3";
494 507 };
495 508 meta = {
496 509 license = [ pkgs.lib.licenses.bsdOriginal ];
497 510 };
498 511 };
499 512 click = super.buildPythonPackage {
500 513 name = "click-5.1";
501 514 buildInputs = with self; [];
502 515 doCheck = false;
503 516 propagatedBuildInputs = with self; [];
504 517 src = fetchurl {
505 518 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
506 519 md5 = "9c5323008cccfe232a8b161fc8196d41";
507 520 };
508 521 meta = {
509 522 license = [ pkgs.lib.licenses.bsdOriginal ];
510 523 };
511 524 };
512 525 colander = super.buildPythonPackage {
513 526 name = "colander-1.3.3";
514 527 buildInputs = with self; [];
515 528 doCheck = false;
516 529 propagatedBuildInputs = with self; [translationstring iso8601];
517 530 src = fetchurl {
518 531 url = "https://pypi.python.org/packages/54/a9/9862a561e015b2c7b56404c0b13828a8bdc51e05ab3703bd792cec064487/colander-1.3.3.tar.gz";
519 532 md5 = "f5d783768c51d73695f49bbe95778ab4";
520 533 };
521 534 meta = {
522 535 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
523 536 };
524 537 };
525 538 configobj = super.buildPythonPackage {
526 539 name = "configobj-5.0.6";
527 540 buildInputs = with self; [];
528 541 doCheck = false;
529 542 propagatedBuildInputs = with self; [six];
530 543 src = fetchurl {
531 544 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
532 545 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
533 546 };
534 547 meta = {
535 548 license = [ pkgs.lib.licenses.bsdOriginal ];
536 549 };
537 550 };
538 551 configparser = super.buildPythonPackage {
539 552 name = "configparser-3.5.0";
540 553 buildInputs = with self; [];
541 554 doCheck = false;
542 555 propagatedBuildInputs = with self; [];
543 556 src = fetchurl {
544 557 url = "https://pypi.python.org/packages/7c/69/c2ce7e91c89dc073eb1aa74c0621c3eefbffe8216b3f9af9d3885265c01c/configparser-3.5.0.tar.gz";
545 558 md5 = "cfdd915a5b7a6c09917a64a573140538";
546 559 };
547 560 meta = {
548 561 license = [ pkgs.lib.licenses.mit ];
549 562 };
550 563 };
551 564 cov-core = super.buildPythonPackage {
552 565 name = "cov-core-1.15.0";
553 566 buildInputs = with self; [];
554 567 doCheck = false;
555 568 propagatedBuildInputs = with self; [coverage];
556 569 src = fetchurl {
557 570 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
558 571 md5 = "f519d4cb4c4e52856afb14af52919fe6";
559 572 };
560 573 meta = {
561 574 license = [ pkgs.lib.licenses.mit ];
562 575 };
563 576 };
564 577 coverage = super.buildPythonPackage {
565 578 name = "coverage-3.7.1";
566 579 buildInputs = with self; [];
567 580 doCheck = false;
568 581 propagatedBuildInputs = with self; [];
569 582 src = fetchurl {
570 583 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
571 584 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
572 585 };
573 586 meta = {
574 587 license = [ pkgs.lib.licenses.bsdOriginal ];
575 588 };
576 589 };
577 590 cssselect = super.buildPythonPackage {
578 591 name = "cssselect-1.0.1";
579 592 buildInputs = with self; [];
580 593 doCheck = false;
581 594 propagatedBuildInputs = with self; [];
582 595 src = fetchurl {
583 596 url = "https://pypi.python.org/packages/77/ff/9c865275cd19290feba56344eba570e719efb7ca5b34d67ed12b22ebbb0d/cssselect-1.0.1.tar.gz";
584 597 md5 = "3fa03bf82a9f0b1223c0f1eb1369e139";
585 598 };
586 599 meta = {
587 600 license = [ pkgs.lib.licenses.bsdOriginal ];
588 601 };
589 602 };
590 603 decorator = super.buildPythonPackage {
591 604 name = "decorator-4.0.11";
592 605 buildInputs = with self; [];
593 606 doCheck = false;
594 607 propagatedBuildInputs = with self; [];
595 608 src = fetchurl {
596 609 url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz";
597 610 md5 = "73644c8f0bd4983d1b6a34b49adec0ae";
598 611 };
599 612 meta = {
600 613 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
601 614 };
602 615 };
603 616 deform = super.buildPythonPackage {
604 617 name = "deform-2.0.4";
605 618 buildInputs = with self; [];
606 619 doCheck = false;
607 620 propagatedBuildInputs = with self; [Chameleon colander iso8601 peppercorn translationstring zope.deprecation];
608 621 src = fetchurl {
609 622 url = "https://pypi.python.org/packages/66/3b/eefcb07abcab7a97f6665aa2d0cf1af741d9d6e78a2e4657fd2b89f89880/deform-2.0.4.tar.gz";
610 623 md5 = "34756e42cf50dd4b4430809116c4ec0a";
611 624 };
612 625 meta = {
613 626 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
614 627 };
615 628 };
616 629 docutils = super.buildPythonPackage {
617 630 name = "docutils-0.13.1";
618 631 buildInputs = with self; [];
619 632 doCheck = false;
620 633 propagatedBuildInputs = with self; [];
621 634 src = fetchurl {
622 635 url = "https://pypi.python.org/packages/05/25/7b5484aca5d46915493f1fd4ecb63c38c333bd32aa9ad6e19da8d08895ae/docutils-0.13.1.tar.gz";
623 636 md5 = "ea4a893c633c788be9b8078b6b305d53";
624 637 };
625 638 meta = {
626 639 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
627 640 };
628 641 };
629 642 dogpile.cache = super.buildPythonPackage {
630 643 name = "dogpile.cache-0.6.4";
631 644 buildInputs = with self; [];
632 645 doCheck = false;
633 646 propagatedBuildInputs = with self; [];
634 647 src = fetchurl {
635 648 url = "https://pypi.python.org/packages/b6/3d/35c05ca01c070bb70d9d422f2c4858ecb021b05b21af438fec5ccd7b945c/dogpile.cache-0.6.4.tar.gz";
636 649 md5 = "66e0a6cae6c08cb1ea25f89d0eadfeb0";
637 650 };
638 651 meta = {
639 652 license = [ pkgs.lib.licenses.bsdOriginal ];
640 653 };
641 654 };
642 655 dogpile.core = super.buildPythonPackage {
643 656 name = "dogpile.core-0.4.1";
644 657 buildInputs = with self; [];
645 658 doCheck = false;
646 659 propagatedBuildInputs = with self; [];
647 660 src = fetchurl {
648 661 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
649 662 md5 = "01cb19f52bba3e95c9b560f39341f045";
650 663 };
651 664 meta = {
652 665 license = [ pkgs.lib.licenses.bsdOriginal ];
653 666 };
654 667 };
655 668 ecdsa = super.buildPythonPackage {
656 669 name = "ecdsa-0.11";
657 670 buildInputs = with self; [];
658 671 doCheck = false;
659 672 propagatedBuildInputs = with self; [];
660 673 src = fetchurl {
661 674 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
662 675 md5 = "8ef586fe4dbb156697d756900cb41d7c";
663 676 };
664 677 meta = {
665 678 license = [ pkgs.lib.licenses.mit ];
666 679 };
667 680 };
668 681 elasticsearch = super.buildPythonPackage {
669 682 name = "elasticsearch-2.3.0";
670 683 buildInputs = with self; [];
671 684 doCheck = false;
672 685 propagatedBuildInputs = with self; [urllib3];
673 686 src = fetchurl {
674 687 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
675 688 md5 = "2550f3b51629cf1ef9636608af92c340";
676 689 };
677 690 meta = {
678 691 license = [ pkgs.lib.licenses.asl20 ];
679 692 };
680 693 };
681 694 elasticsearch-dsl = super.buildPythonPackage {
682 695 name = "elasticsearch-dsl-2.2.0";
683 696 buildInputs = with self; [];
684 697 doCheck = false;
685 698 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
686 699 src = fetchurl {
687 700 url = "https://pypi.python.org/packages/66/2f/52a086968788e58461641570f45c3207a52d46ebbe9b77dc22b6a8ffda66/elasticsearch-dsl-2.2.0.tar.gz";
688 701 md5 = "fa6bd3c87ea3caa8f0f051bc37c53221";
689 702 };
690 703 meta = {
691 704 license = [ pkgs.lib.licenses.asl20 ];
692 705 };
693 706 };
694 707 entrypoints = super.buildPythonPackage {
695 708 name = "entrypoints-0.2.2";
696 709 buildInputs = with self; [];
697 710 doCheck = false;
698 711 propagatedBuildInputs = with self; [configparser];
699 712 src = fetchurl {
700 713 url = "https://code.rhodecode.com/upstream/entrypoints/archive/96e6d645684e1af3d7df5b5272f3fe85a546b233.tar.gz?md5=7db37771aea9ac9fefe093e5d6987313";
701 714 md5 = "7db37771aea9ac9fefe093e5d6987313";
702 715 };
703 716 meta = {
704 717 license = [ pkgs.lib.licenses.mit ];
705 718 };
706 719 };
707 720 enum34 = super.buildPythonPackage {
708 721 name = "enum34-1.1.6";
709 722 buildInputs = with self; [];
710 723 doCheck = false;
711 724 propagatedBuildInputs = with self; [];
712 725 src = fetchurl {
713 726 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
714 727 md5 = "5f13a0841a61f7fc295c514490d120d0";
715 728 };
716 729 meta = {
717 730 license = [ pkgs.lib.licenses.bsdOriginal ];
718 731 };
719 732 };
720 733 funcsigs = super.buildPythonPackage {
721 734 name = "funcsigs-1.0.2";
722 735 buildInputs = with self; [];
723 736 doCheck = false;
724 737 propagatedBuildInputs = with self; [];
725 738 src = fetchurl {
726 739 url = "https://pypi.python.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
727 740 md5 = "7e583285b1fb8a76305d6d68f4ccc14e";
728 741 };
729 742 meta = {
730 743 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
731 744 };
732 745 };
733 746 functools32 = super.buildPythonPackage {
734 747 name = "functools32-3.2.3.post2";
735 748 buildInputs = with self; [];
736 749 doCheck = false;
737 750 propagatedBuildInputs = with self; [];
738 751 src = fetchurl {
739 752 url = "https://pypi.python.org/packages/5e/1a/0aa2c8195a204a9f51284018562dea77e25511f02fe924fac202fc012172/functools32-3.2.3-2.zip";
740 753 md5 = "d55232eb132ec779e6893c902a0bc5ad";
741 754 };
742 755 meta = {
743 756 license = [ pkgs.lib.licenses.psfl ];
744 757 };
745 758 };
746 759 future = super.buildPythonPackage {
747 760 name = "future-0.14.3";
748 761 buildInputs = with self; [];
749 762 doCheck = false;
750 763 propagatedBuildInputs = with self; [];
751 764 src = fetchurl {
752 765 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
753 766 md5 = "e94079b0bd1fc054929e8769fc0f6083";
754 767 };
755 768 meta = {
756 769 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
757 770 };
758 771 };
759 772 futures = super.buildPythonPackage {
760 773 name = "futures-3.0.2";
761 774 buildInputs = with self; [];
762 775 doCheck = false;
763 776 propagatedBuildInputs = with self; [];
764 777 src = fetchurl {
765 778 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
766 779 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
767 780 };
768 781 meta = {
769 782 license = [ pkgs.lib.licenses.bsdOriginal ];
770 783 };
771 784 };
772 785 gevent = super.buildPythonPackage {
773 786 name = "gevent-1.2.2";
774 787 buildInputs = with self; [];
775 788 doCheck = false;
776 789 propagatedBuildInputs = with self; [greenlet];
777 790 src = fetchurl {
778 791 url = "https://pypi.python.org/packages/1b/92/b111f76e54d2be11375b47b213b56687214f258fd9dae703546d30b837be/gevent-1.2.2.tar.gz";
779 792 md5 = "7f0baf355384fe5ff2ecf66853422554";
780 793 };
781 794 meta = {
782 795 license = [ pkgs.lib.licenses.mit ];
783 796 };
784 797 };
785 798 gnureadline = super.buildPythonPackage {
786 799 name = "gnureadline-6.3.3";
787 800 buildInputs = with self; [];
788 801 doCheck = false;
789 802 propagatedBuildInputs = with self; [];
790 803 src = fetchurl {
791 804 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
792 805 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
793 806 };
794 807 meta = {
795 808 license = [ pkgs.lib.licenses.gpl1 ];
796 809 };
797 810 };
798 811 gprof2dot = super.buildPythonPackage {
799 812 name = "gprof2dot-2016.10.13";
800 813 buildInputs = with self; [];
801 814 doCheck = false;
802 815 propagatedBuildInputs = with self; [];
803 816 src = fetchurl {
804 817 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
805 818 md5 = "0125401f15fd2afe1df686a76c64a4fd";
806 819 };
807 820 meta = {
808 821 license = [ { fullName = "LGPL"; } ];
809 822 };
810 823 };
811 824 graphviz = super.buildPythonPackage {
812 825 name = "graphviz-0.7.1";
813 826 buildInputs = with self; [];
814 827 doCheck = false;
815 828 propagatedBuildInputs = with self; [];
816 829 src = fetchurl {
817 830 url = "https://pypi.python.org/packages/7d/2d/f5cfa56467ca5a65eb44e1103d89d2f65dbc4f04cf7a1f3d38e973c3d1a8/graphviz-0.7.1.zip";
818 831 md5 = "d5926e89975121d56dec777a79bfc9d1";
819 832 };
820 833 meta = {
821 834 license = [ pkgs.lib.licenses.mit ];
822 835 };
823 836 };
824 837 greenlet = super.buildPythonPackage {
825 838 name = "greenlet-0.4.12";
826 839 buildInputs = with self; [];
827 840 doCheck = false;
828 841 propagatedBuildInputs = with self; [];
829 842 src = fetchurl {
830 843 url = "https://pypi.python.org/packages/be/76/82af375d98724054b7e273b5d9369346937324f9bcc20980b45b068ef0b0/greenlet-0.4.12.tar.gz";
831 844 md5 = "e8637647d58a26c4a1f51ca393e53c00";
832 845 };
833 846 meta = {
834 847 license = [ pkgs.lib.licenses.mit ];
835 848 };
836 849 };
837 850 gunicorn = super.buildPythonPackage {
838 851 name = "gunicorn-19.7.1";
839 852 buildInputs = with self; [];
840 853 doCheck = false;
841 854 propagatedBuildInputs = with self; [];
842 855 src = fetchurl {
843 856 url = "https://pypi.python.org/packages/30/3a/10bb213cede0cc4d13ac2263316c872a64bf4c819000c8ccd801f1d5f822/gunicorn-19.7.1.tar.gz";
844 857 md5 = "174d3c3cd670a5be0404d84c484e590c";
845 858 };
846 859 meta = {
847 860 license = [ pkgs.lib.licenses.mit ];
848 861 };
849 862 };
850 863 html5lib = super.buildPythonPackage {
851 864 name = "html5lib-0.9999999";
852 865 buildInputs = with self; [];
853 866 doCheck = false;
854 867 propagatedBuildInputs = with self; [six];
855 868 src = fetchurl {
856 869 url = "https://pypi.python.org/packages/ae/ae/bcb60402c60932b32dfaf19bb53870b29eda2cd17551ba5639219fb5ebf9/html5lib-0.9999999.tar.gz";
857 870 md5 = "ef43cb05e9e799f25d65d1135838a96f";
858 871 };
859 872 meta = {
860 873 license = [ pkgs.lib.licenses.mit ];
861 874 };
862 875 };
863 876 infrae.cache = super.buildPythonPackage {
864 877 name = "infrae.cache-1.0.1";
865 878 buildInputs = with self; [];
866 879 doCheck = false;
867 880 propagatedBuildInputs = with self; [Beaker repoze.lru];
868 881 src = fetchurl {
869 882 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
870 883 md5 = "b09076a766747e6ed2a755cc62088e32";
871 884 };
872 885 meta = {
873 886 license = [ pkgs.lib.licenses.zpt21 ];
874 887 };
875 888 };
876 889 invoke = super.buildPythonPackage {
877 890 name = "invoke-0.13.0";
878 891 buildInputs = with self; [];
879 892 doCheck = false;
880 893 propagatedBuildInputs = with self; [];
881 894 src = fetchurl {
882 895 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
883 896 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
884 897 };
885 898 meta = {
886 899 license = [ pkgs.lib.licenses.bsdOriginal ];
887 900 };
888 901 };
889 902 ipdb = super.buildPythonPackage {
890 903 name = "ipdb-0.10.3";
891 904 buildInputs = with self; [];
892 905 doCheck = false;
893 906 propagatedBuildInputs = with self; [setuptools ipython];
894 907 src = fetchurl {
895 908 url = "https://pypi.python.org/packages/ad/cc/0e7298e1fbf2efd52667c9354a12aa69fb6f796ce230cca03525051718ef/ipdb-0.10.3.tar.gz";
896 909 md5 = "def1f6ac075d54bdee07e6501263d4fa";
897 910 };
898 911 meta = {
899 912 license = [ pkgs.lib.licenses.bsdOriginal ];
900 913 };
901 914 };
902 915 ipython = super.buildPythonPackage {
903 916 name = "ipython-5.1.0";
904 917 buildInputs = with self; [];
905 918 doCheck = false;
906 919 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit Pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
907 920 src = fetchurl {
908 921 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
909 922 md5 = "47c8122420f65b58784cb4b9b4af35e3";
910 923 };
911 924 meta = {
912 925 license = [ pkgs.lib.licenses.bsdOriginal ];
913 926 };
914 927 };
915 928 ipython-genutils = super.buildPythonPackage {
916 929 name = "ipython-genutils-0.2.0";
917 930 buildInputs = with self; [];
918 931 doCheck = false;
919 932 propagatedBuildInputs = with self; [];
920 933 src = fetchurl {
921 934 url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
922 935 md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
923 936 };
924 937 meta = {
925 938 license = [ pkgs.lib.licenses.bsdOriginal ];
926 939 };
927 940 };
928 941 iso8601 = super.buildPythonPackage {
929 942 name = "iso8601-0.1.11";
930 943 buildInputs = with self; [];
931 944 doCheck = false;
932 945 propagatedBuildInputs = with self; [];
933 946 src = fetchurl {
934 947 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
935 948 md5 = "b06d11cd14a64096f907086044f0fe38";
936 949 };
937 950 meta = {
938 951 license = [ pkgs.lib.licenses.mit ];
939 952 };
940 953 };
941 954 itsdangerous = super.buildPythonPackage {
942 955 name = "itsdangerous-0.24";
943 956 buildInputs = with self; [];
944 957 doCheck = false;
945 958 propagatedBuildInputs = with self; [];
946 959 src = fetchurl {
947 960 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
948 961 md5 = "a3d55aa79369aef5345c036a8a26307f";
949 962 };
950 963 meta = {
951 964 license = [ pkgs.lib.licenses.bsdOriginal ];
952 965 };
953 966 };
954 967 jsonschema = super.buildPythonPackage {
955 968 name = "jsonschema-2.6.0";
956 969 buildInputs = with self; [];
957 970 doCheck = false;
958 971 propagatedBuildInputs = with self; [functools32];
959 972 src = fetchurl {
960 973 url = "https://pypi.python.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
961 974 md5 = "50c6b69a373a8b55ff1e0ec6e78f13f4";
962 975 };
963 976 meta = {
964 977 license = [ pkgs.lib.licenses.mit ];
965 978 };
966 979 };
967 980 jupyter-client = super.buildPythonPackage {
968 981 name = "jupyter-client-5.0.0";
969 982 buildInputs = with self; [];
970 983 doCheck = false;
971 984 propagatedBuildInputs = with self; [traitlets jupyter-core pyzmq python-dateutil];
972 985 src = fetchurl {
973 986 url = "https://pypi.python.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
974 987 md5 = "1acd331b5c9fb4d79dae9939e79f2426";
975 988 };
976 989 meta = {
977 990 license = [ pkgs.lib.licenses.bsdOriginal ];
978 991 };
979 992 };
980 993 jupyter-core = super.buildPythonPackage {
981 994 name = "jupyter-core-4.3.0";
982 995 buildInputs = with self; [];
983 996 doCheck = false;
984 997 propagatedBuildInputs = with self; [traitlets];
985 998 src = fetchurl {
986 999 url = "https://pypi.python.org/packages/2f/39/5138f975100ce14d150938df48a83cd852a3fd8e24b1244f4113848e69e2/jupyter_core-4.3.0.tar.gz";
987 1000 md5 = "18819511a809afdeed9a995a9c27bcfb";
988 1001 };
989 1002 meta = {
990 1003 license = [ pkgs.lib.licenses.bsdOriginal ];
991 1004 };
992 1005 };
1006 hupper = super.buildPythonPackage {
1007 name = "hupper-1.0";
1008 buildInputs = with self; [];
1009 doCheck = false;
1010 propagatedBuildInputs = with self; [];
1011 src = fetchurl {
1012 url = "https://pypi.python.org/packages/2e/07/df892c564dc09bb3cf6f6deb976c26adf9117db75ba218cb4353dbc9d826/hupper-1.0.tar.gz";
1013 md5 = "26e77da7d5ac5858f59af050d1a6eb5a";
1014 };
1015 meta = {
1016 license = [ pkgs.lib.licenses.mit ];
1017 };
1018 };
993 1019 kombu = super.buildPythonPackage {
994 1020 name = "kombu-1.5.1";
995 1021 buildInputs = with self; [];
996 1022 doCheck = false;
997 1023 propagatedBuildInputs = with self; [anyjson amqplib];
998 1024 src = fetchurl {
999 1025 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
1000 1026 md5 = "50662f3c7e9395b3d0721fb75d100b63";
1001 1027 };
1002 1028 meta = {
1003 1029 license = [ pkgs.lib.licenses.bsdOriginal ];
1004 1030 };
1005 1031 };
1006 1032 lxml = super.buildPythonPackage {
1007 1033 name = "lxml-3.7.3";
1008 1034 buildInputs = with self; [];
1009 1035 doCheck = false;
1010 1036 propagatedBuildInputs = with self; [];
1011 1037 src = fetchurl {
1012 1038 url = "https://pypi.python.org/packages/39/e8/a8e0b1fa65dd021d48fe21464f71783655f39a41f218293c1c590d54eb82/lxml-3.7.3.tar.gz";
1013 1039 md5 = "075692ce442e69bbd604d44e21c02753";
1014 1040 };
1015 1041 meta = {
1016 1042 license = [ pkgs.lib.licenses.bsdOriginal ];
1017 1043 };
1018 1044 };
1019 1045 meld3 = super.buildPythonPackage {
1020 1046 name = "meld3-1.0.2";
1021 1047 buildInputs = with self; [];
1022 1048 doCheck = false;
1023 1049 propagatedBuildInputs = with self; [];
1024 1050 src = fetchurl {
1025 1051 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
1026 1052 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
1027 1053 };
1028 1054 meta = {
1029 1055 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1030 1056 };
1031 1057 };
1032 1058 mistune = super.buildPythonPackage {
1033 1059 name = "mistune-0.7.4";
1034 1060 buildInputs = with self; [];
1035 1061 doCheck = false;
1036 1062 propagatedBuildInputs = with self; [];
1037 1063 src = fetchurl {
1038 1064 url = "https://pypi.python.org/packages/25/a4/12a584c0c59c9fed529f8b3c47ca8217c0cf8bcc5e1089d3256410cfbdbc/mistune-0.7.4.tar.gz";
1039 1065 md5 = "92d01cb717e9e74429e9bde9d29ac43b";
1040 1066 };
1041 1067 meta = {
1042 1068 license = [ pkgs.lib.licenses.bsdOriginal ];
1043 1069 };
1044 1070 };
1045 1071 mock = super.buildPythonPackage {
1046 1072 name = "mock-1.0.1";
1047 1073 buildInputs = with self; [];
1048 1074 doCheck = false;
1049 1075 propagatedBuildInputs = with self; [];
1050 1076 src = fetchurl {
1051 1077 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
1052 1078 md5 = "869f08d003c289a97c1a6610faf5e913";
1053 1079 };
1054 1080 meta = {
1055 1081 license = [ pkgs.lib.licenses.bsdOriginal ];
1056 1082 };
1057 1083 };
1058 1084 msgpack-python = super.buildPythonPackage {
1059 1085 name = "msgpack-python-0.4.8";
1060 1086 buildInputs = with self; [];
1061 1087 doCheck = false;
1062 1088 propagatedBuildInputs = with self; [];
1063 1089 src = fetchurl {
1064 1090 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
1065 1091 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
1066 1092 };
1067 1093 meta = {
1068 1094 license = [ pkgs.lib.licenses.asl20 ];
1069 1095 };
1070 1096 };
1071 1097 nbconvert = super.buildPythonPackage {
1072 1098 name = "nbconvert-5.1.1";
1073 1099 buildInputs = with self; [];
1074 1100 doCheck = false;
1075 1101 propagatedBuildInputs = with self; [mistune Jinja2 Pygments traitlets jupyter-core nbformat entrypoints bleach pandocfilters testpath];
1076 1102 src = fetchurl {
1077 1103 url = "https://pypi.python.org/packages/95/58/df1c91f1658ee5df19097f915a1e71c91fc824a708d82d2b2e35f8b80e9a/nbconvert-5.1.1.tar.gz";
1078 1104 md5 = "d0263fb03a44db2f94eea09a608ed813";
1079 1105 };
1080 1106 meta = {
1081 1107 license = [ pkgs.lib.licenses.bsdOriginal ];
1082 1108 };
1083 1109 };
1084 1110 nbformat = super.buildPythonPackage {
1085 1111 name = "nbformat-4.3.0";
1086 1112 buildInputs = with self; [];
1087 1113 doCheck = false;
1088 1114 propagatedBuildInputs = with self; [ipython-genutils traitlets jsonschema jupyter-core];
1089 1115 src = fetchurl {
1090 1116 url = "https://pypi.python.org/packages/f9/c5/89df4abf906f766727f976e170caa85b4f1c1d1feb1f45d716016e68e19f/nbformat-4.3.0.tar.gz";
1091 1117 md5 = "9a00d20425914cd5ba5f97769d9963ca";
1092 1118 };
1093 1119 meta = {
1094 1120 license = [ pkgs.lib.licenses.bsdOriginal ];
1095 1121 };
1096 1122 };
1097 1123 nose = super.buildPythonPackage {
1098 1124 name = "nose-1.3.6";
1099 1125 buildInputs = with self; [];
1100 1126 doCheck = false;
1101 1127 propagatedBuildInputs = with self; [];
1102 1128 src = fetchurl {
1103 1129 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
1104 1130 md5 = "0ca546d81ca8309080fc80cb389e7a16";
1105 1131 };
1106 1132 meta = {
1107 1133 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
1108 1134 };
1109 1135 };
1110 1136 objgraph = super.buildPythonPackage {
1111 1137 name = "objgraph-3.1.0";
1112 1138 buildInputs = with self; [];
1113 1139 doCheck = false;
1114 1140 propagatedBuildInputs = with self; [graphviz];
1115 1141 src = fetchurl {
1116 1142 url = "https://pypi.python.org/packages/f4/b3/082e54e62094cb2ec84f8d5a49e0142cef99016491cecba83309cff920ae/objgraph-3.1.0.tar.gz";
1117 1143 md5 = "eddbd96039796bfbd13eee403701e64a";
1118 1144 };
1119 1145 meta = {
1120 1146 license = [ pkgs.lib.licenses.mit ];
1121 1147 };
1122 1148 };
1123 1149 packaging = super.buildPythonPackage {
1124 1150 name = "packaging-15.2";
1125 1151 buildInputs = with self; [];
1126 1152 doCheck = false;
1127 1153 propagatedBuildInputs = with self; [];
1128 1154 src = fetchurl {
1129 1155 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
1130 1156 md5 = "c16093476f6ced42128bf610e5db3784";
1131 1157 };
1132 1158 meta = {
1133 1159 license = [ pkgs.lib.licenses.asl20 ];
1134 1160 };
1135 1161 };
1136 1162 pandocfilters = super.buildPythonPackage {
1137 1163 name = "pandocfilters-1.4.1";
1138 1164 buildInputs = with self; [];
1139 1165 doCheck = false;
1140 1166 propagatedBuildInputs = with self; [];
1141 1167 src = fetchurl {
1142 1168 url = "https://pypi.python.org/packages/e3/1f/21d1b7e8ca571e80b796c758d361fdf5554335ff138158654684bc5401d8/pandocfilters-1.4.1.tar.gz";
1143 1169 md5 = "7680d9f9ec07397dd17f380ee3818b9d";
1144 1170 };
1145 1171 meta = {
1146 1172 license = [ pkgs.lib.licenses.bsdOriginal ];
1147 1173 };
1148 1174 };
1149 1175 paramiko = super.buildPythonPackage {
1150 1176 name = "paramiko-1.15.1";
1151 1177 buildInputs = with self; [];
1152 1178 doCheck = false;
1153 1179 propagatedBuildInputs = with self; [pycrypto ecdsa];
1154 1180 src = fetchurl {
1155 1181 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
1156 1182 md5 = "48c274c3f9b1282932567b21f6acf3b5";
1157 1183 };
1158 1184 meta = {
1159 1185 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1160 1186 };
1161 1187 };
1162 1188 pathlib2 = super.buildPythonPackage {
1163 1189 name = "pathlib2-2.3.0";
1164 1190 buildInputs = with self; [];
1165 1191 doCheck = false;
1166 1192 propagatedBuildInputs = with self; [six scandir];
1167 1193 src = fetchurl {
1168 1194 url = "https://pypi.python.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
1169 1195 md5 = "89c90409d11fd5947966b6a30a47d18c";
1170 1196 };
1171 1197 meta = {
1172 1198 license = [ pkgs.lib.licenses.mit ];
1173 1199 };
1174 1200 };
1175 1201 peppercorn = super.buildPythonPackage {
1176 1202 name = "peppercorn-0.5";
1177 1203 buildInputs = with self; [];
1178 1204 doCheck = false;
1179 1205 propagatedBuildInputs = with self; [];
1180 1206 src = fetchurl {
1181 1207 url = "https://pypi.python.org/packages/45/ec/a62ec317d1324a01567c5221b420742f094f05ee48097e5157d32be3755c/peppercorn-0.5.tar.gz";
1182 1208 md5 = "f08efbca5790019ab45d76b7244abd40";
1183 1209 };
1184 1210 meta = {
1185 1211 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1186 1212 };
1187 1213 };
1188 1214 pexpect = super.buildPythonPackage {
1189 1215 name = "pexpect-4.2.1";
1190 1216 buildInputs = with self; [];
1191 1217 doCheck = false;
1192 1218 propagatedBuildInputs = with self; [ptyprocess];
1193 1219 src = fetchurl {
1194 1220 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
1195 1221 md5 = "3694410001a99dff83f0b500a1ca1c95";
1196 1222 };
1197 1223 meta = {
1198 1224 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1199 1225 };
1200 1226 };
1201 1227 pickleshare = super.buildPythonPackage {
1202 1228 name = "pickleshare-0.7.4";
1203 1229 buildInputs = with self; [];
1204 1230 doCheck = false;
1205 1231 propagatedBuildInputs = with self; [pathlib2];
1206 1232 src = fetchurl {
1207 1233 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
1208 1234 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
1209 1235 };
1210 1236 meta = {
1211 1237 license = [ pkgs.lib.licenses.mit ];
1212 1238 };
1213 1239 };
1240 plaster = super.buildPythonPackage {
1241 name = "plaster-0.5";
1242 buildInputs = with self; [];
1243 doCheck = false;
1244 propagatedBuildInputs = with self; [setuptools];
1245 src = fetchurl {
1246 url = "https://pypi.python.org/packages/99/b3/d7ca1fe31d2b56dba68a238721fda6820770f9c2a3de17a582d4b5b2edcc/plaster-0.5.tar.gz";
1247 md5 = "c59345a67a860cfcaa1bd6a81451399d";
1248 };
1249 meta = {
1250 license = [ pkgs.lib.licenses.mit ];
1251 };
1252 };
1253 plaster-pastedeploy = super.buildPythonPackage {
1254 name = "plaster-pastedeploy-0.4.1";
1255 buildInputs = with self; [];
1256 doCheck = false;
1257 propagatedBuildInputs = with self; [PasteDeploy plaster];
1258 src = fetchurl {
1259 url = "https://pypi.python.org/packages/9d/6e/f8be01ed41c94e6c54ac97cf2eb142a702aae0c8cce31c846f785e525b40/plaster_pastedeploy-0.4.1.tar.gz";
1260 md5 = "f48d5344b922e56c4978eebf1cd2e0d3";
1261 };
1262 meta = {
1263 license = [ pkgs.lib.licenses.mit ];
1264 };
1265 };
1214 1266 prompt-toolkit = super.buildPythonPackage {
1215 1267 name = "prompt-toolkit-1.0.14";
1216 1268 buildInputs = with self; [];
1217 1269 doCheck = false;
1218 1270 propagatedBuildInputs = with self; [six wcwidth];
1219 1271 src = fetchurl {
1220 1272 url = "https://pypi.python.org/packages/55/56/8c39509b614bda53e638b7500f12577d663ac1b868aef53426fc6a26c3f5/prompt_toolkit-1.0.14.tar.gz";
1221 1273 md5 = "f24061ae133ed32c6b764e92bd48c496";
1222 1274 };
1223 1275 meta = {
1224 1276 license = [ pkgs.lib.licenses.bsdOriginal ];
1225 1277 };
1226 1278 };
1227 1279 psutil = super.buildPythonPackage {
1228 1280 name = "psutil-4.3.1";
1229 1281 buildInputs = with self; [];
1230 1282 doCheck = false;
1231 1283 propagatedBuildInputs = with self; [];
1232 1284 src = fetchurl {
1233 1285 url = "https://pypi.python.org/packages/78/cc/f267a1371f229bf16db6a4e604428c3b032b823b83155bd33cef45e49a53/psutil-4.3.1.tar.gz";
1234 1286 md5 = "199a366dba829c88bddaf5b41d19ddc0";
1235 1287 };
1236 1288 meta = {
1237 1289 license = [ pkgs.lib.licenses.bsdOriginal ];
1238 1290 };
1239 1291 };
1240 1292 psycopg2 = super.buildPythonPackage {
1241 1293 name = "psycopg2-2.7.1";
1242 1294 buildInputs = with self; [];
1243 1295 doCheck = false;
1244 1296 propagatedBuildInputs = with self; [];
1245 1297 src = fetchurl {
1246 1298 url = "https://pypi.python.org/packages/f8/e9/5793369ce8a41bf5467623ded8d59a434dfef9c136351aca4e70c2657ba0/psycopg2-2.7.1.tar.gz";
1247 1299 md5 = "67848ac33af88336046802f6ef7081f3";
1248 1300 };
1249 1301 meta = {
1250 1302 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1251 1303 };
1252 1304 };
1253 1305 ptyprocess = super.buildPythonPackage {
1254 1306 name = "ptyprocess-0.5.2";
1255 1307 buildInputs = with self; [];
1256 1308 doCheck = false;
1257 1309 propagatedBuildInputs = with self; [];
1258 1310 src = fetchurl {
1259 1311 url = "https://pypi.python.org/packages/51/83/5d07dc35534640b06f9d9f1a1d2bc2513fb9cc7595a1b0e28ae5477056ce/ptyprocess-0.5.2.tar.gz";
1260 1312 md5 = "d3b8febae1b8c53b054bd818d0bb8665";
1261 1313 };
1262 1314 meta = {
1263 1315 license = [ ];
1264 1316 };
1265 1317 };
1266 1318 py = super.buildPythonPackage {
1267 1319 name = "py-1.4.34";
1268 1320 buildInputs = with self; [];
1269 1321 doCheck = false;
1270 1322 propagatedBuildInputs = with self; [];
1271 1323 src = fetchurl {
1272 1324 url = "https://pypi.python.org/packages/68/35/58572278f1c097b403879c1e9369069633d1cbad5239b9057944bb764782/py-1.4.34.tar.gz";
1273 1325 md5 = "d9c3d8f734b0819ff48e355d77bf1730";
1274 1326 };
1275 1327 meta = {
1276 1328 license = [ pkgs.lib.licenses.mit ];
1277 1329 };
1278 1330 };
1279 1331 py-bcrypt = super.buildPythonPackage {
1280 1332 name = "py-bcrypt-0.4";
1281 1333 buildInputs = with self; [];
1282 1334 doCheck = false;
1283 1335 propagatedBuildInputs = with self; [];
1284 1336 src = fetchurl {
1285 1337 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1286 1338 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
1287 1339 };
1288 1340 meta = {
1289 1341 license = [ pkgs.lib.licenses.bsdOriginal ];
1290 1342 };
1291 1343 };
1292 1344 py-gfm = super.buildPythonPackage {
1293 1345 name = "py-gfm-0.1.3";
1294 1346 buildInputs = with self; [];
1295 1347 doCheck = false;
1296 1348 propagatedBuildInputs = with self; [setuptools Markdown];
1297 1349 src = fetchurl {
1298 1350 url = "https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16";
1299 1351 md5 = "0d0d5385bfb629eea636a80b9c2bfd16";
1300 1352 };
1301 1353 meta = {
1302 1354 license = [ pkgs.lib.licenses.bsdOriginal ];
1303 1355 };
1304 1356 };
1305 1357 pycrypto = super.buildPythonPackage {
1306 1358 name = "pycrypto-2.6.1";
1307 1359 buildInputs = with self; [];
1308 1360 doCheck = false;
1309 1361 propagatedBuildInputs = with self; [];
1310 1362 src = fetchurl {
1311 1363 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1312 1364 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1313 1365 };
1314 1366 meta = {
1315 1367 license = [ pkgs.lib.licenses.publicDomain ];
1316 1368 };
1317 1369 };
1318 1370 pycurl = super.buildPythonPackage {
1319 1371 name = "pycurl-7.19.5";
1320 1372 buildInputs = with self; [];
1321 1373 doCheck = false;
1322 1374 propagatedBuildInputs = with self; [];
1323 1375 src = fetchurl {
1324 1376 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1325 1377 md5 = "47b4eac84118e2606658122104e62072";
1326 1378 };
1327 1379 meta = {
1328 1380 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1329 1381 };
1330 1382 };
1331 1383 pyflakes = super.buildPythonPackage {
1332 1384 name = "pyflakes-0.8.1";
1333 1385 buildInputs = with self; [];
1334 1386 doCheck = false;
1335 1387 propagatedBuildInputs = with self; [];
1336 1388 src = fetchurl {
1337 1389 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1338 1390 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1339 1391 };
1340 1392 meta = {
1341 1393 license = [ pkgs.lib.licenses.mit ];
1342 1394 };
1343 1395 };
1344 1396 pygments-markdown-lexer = super.buildPythonPackage {
1345 1397 name = "pygments-markdown-lexer-0.1.0.dev39";
1346 1398 buildInputs = with self; [];
1347 1399 doCheck = false;
1348 1400 propagatedBuildInputs = with self; [Pygments];
1349 1401 src = fetchurl {
1350 1402 url = "https://pypi.python.org/packages/c3/12/674cdee66635d638cedb2c5d9c85ce507b7b2f91bdba29e482f1b1160ff6/pygments-markdown-lexer-0.1.0.dev39.zip";
1351 1403 md5 = "6360fe0f6d1f896e35b7a0142ce6459c";
1352 1404 };
1353 1405 meta = {
1354 1406 license = [ pkgs.lib.licenses.asl20 ];
1355 1407 };
1356 1408 };
1357 1409 pyparsing = super.buildPythonPackage {
1358 1410 name = "pyparsing-1.5.7";
1359 1411 buildInputs = with self; [];
1360 1412 doCheck = false;
1361 1413 propagatedBuildInputs = with self; [];
1362 1414 src = fetchurl {
1363 1415 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1364 1416 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1365 1417 };
1366 1418 meta = {
1367 1419 license = [ pkgs.lib.licenses.mit ];
1368 1420 };
1369 1421 };
1370 1422 pyramid = super.buildPythonPackage {
1371 name = "pyramid-1.7.4";
1423 name = "pyramid-1.9";
1372 1424 buildInputs = with self; [];
1373 1425 doCheck = false;
1374 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
1426 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy plaster plaster-pastedeploy hupper];
1375 1427 src = fetchurl {
1376 url = "https://pypi.python.org/packages/33/91/55f5c661f8923902cd1f68d75f2b937c45e7682857356cf18f0be5493899/pyramid-1.7.4.tar.gz";
1377 md5 = "6ef1dfdcff9136d04490410757c4c446";
1428 url = "https://pypi.python.org/packages/b0/73/715321e129334f3e41430bede877620175a63ed075fd5d1fd2c25b7cb121/pyramid-1.9.tar.gz";
1429 md5 = "aa6c7c568f83151af51eb053ac633bc4";
1378 1430 };
1379 1431 meta = {
1380 1432 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1381 1433 };
1382 1434 };
1383 1435 pyramid-beaker = super.buildPythonPackage {
1384 1436 name = "pyramid-beaker-0.8";
1385 1437 buildInputs = with self; [];
1386 1438 doCheck = false;
1387 1439 propagatedBuildInputs = with self; [pyramid Beaker];
1388 1440 src = fetchurl {
1389 1441 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1390 1442 md5 = "22f14be31b06549f80890e2c63a93834";
1391 1443 };
1392 1444 meta = {
1393 1445 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1394 1446 };
1395 1447 };
1396 1448 pyramid-debugtoolbar = super.buildPythonPackage {
1397 1449 name = "pyramid-debugtoolbar-3.0.5";
1398 1450 buildInputs = with self; [];
1399 1451 doCheck = false;
1400 1452 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1401 1453 src = fetchurl {
1402 1454 url = "https://pypi.python.org/packages/64/0e/df00bfb55605900e7a2f7e4a18dd83575a6651688e297d5a0aa4c208fd7d/pyramid_debugtoolbar-3.0.5.tar.gz";
1403 1455 md5 = "aebab8c3bfdc6f89e4d3adc1d126538e";
1404 1456 };
1405 1457 meta = {
1406 1458 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1407 1459 };
1408 1460 };
1409 1461 pyramid-jinja2 = super.buildPythonPackage {
1410 1462 name = "pyramid-jinja2-2.5";
1411 1463 buildInputs = with self; [];
1412 1464 doCheck = false;
1413 1465 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1414 1466 src = fetchurl {
1415 1467 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1416 1468 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1417 1469 };
1418 1470 meta = {
1419 1471 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1420 1472 };
1421 1473 };
1422 1474 pyramid-mako = super.buildPythonPackage {
1423 1475 name = "pyramid-mako-1.0.2";
1424 1476 buildInputs = with self; [];
1425 1477 doCheck = false;
1426 1478 propagatedBuildInputs = with self; [pyramid Mako];
1427 1479 src = fetchurl {
1428 1480 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1429 1481 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1430 1482 };
1431 1483 meta = {
1432 1484 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1433 1485 };
1434 1486 };
1435 1487 pysqlite = super.buildPythonPackage {
1436 1488 name = "pysqlite-2.8.3";
1437 1489 buildInputs = with self; [];
1438 1490 doCheck = false;
1439 1491 propagatedBuildInputs = with self; [];
1440 1492 src = fetchurl {
1441 1493 url = "https://pypi.python.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1442 1494 md5 = "033f17b8644577715aee55e8832ac9fc";
1443 1495 };
1444 1496 meta = {
1445 1497 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1446 1498 };
1447 1499 };
1448 1500 pytest = super.buildPythonPackage {
1449 1501 name = "pytest-3.1.2";
1450 1502 buildInputs = with self; [];
1451 1503 doCheck = false;
1452 1504 propagatedBuildInputs = with self; [py setuptools];
1453 1505 src = fetchurl {
1454 1506 url = "https://pypi.python.org/packages/72/2b/2d3155e01f45a5a04427857352ee88220ee39550b2bc078f9db3190aea46/pytest-3.1.2.tar.gz";
1455 1507 md5 = "c4d179f89043cc925e1c169d03128e02";
1456 1508 };
1457 1509 meta = {
1458 1510 license = [ pkgs.lib.licenses.mit ];
1459 1511 };
1460 1512 };
1461 1513 pytest-catchlog = super.buildPythonPackage {
1462 1514 name = "pytest-catchlog-1.2.2";
1463 1515 buildInputs = with self; [];
1464 1516 doCheck = false;
1465 1517 propagatedBuildInputs = with self; [py pytest];
1466 1518 src = fetchurl {
1467 1519 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1468 1520 md5 = "09d890c54c7456c818102b7ff8c182c8";
1469 1521 };
1470 1522 meta = {
1471 1523 license = [ pkgs.lib.licenses.mit ];
1472 1524 };
1473 1525 };
1474 1526 pytest-cov = super.buildPythonPackage {
1475 1527 name = "pytest-cov-2.5.1";
1476 1528 buildInputs = with self; [];
1477 1529 doCheck = false;
1478 1530 propagatedBuildInputs = with self; [pytest coverage];
1479 1531 src = fetchurl {
1480 1532 url = "https://pypi.python.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
1481 1533 md5 = "5acf38d4909e19819eb5c1754fbfc0ac";
1482 1534 };
1483 1535 meta = {
1484 1536 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1485 1537 };
1486 1538 };
1487 1539 pytest-profiling = super.buildPythonPackage {
1488 1540 name = "pytest-profiling-1.2.6";
1489 1541 buildInputs = with self; [];
1490 1542 doCheck = false;
1491 1543 propagatedBuildInputs = with self; [six pytest gprof2dot];
1492 1544 src = fetchurl {
1493 1545 url = "https://pypi.python.org/packages/f9/0d/df67fb9ce16c2cef201693da956321b1bccfbf9a4ead39748b9f9d1d74cb/pytest-profiling-1.2.6.tar.gz";
1494 1546 md5 = "50eb4c66c3762a2f1a49669bedc0b894";
1495 1547 };
1496 1548 meta = {
1497 1549 license = [ pkgs.lib.licenses.mit ];
1498 1550 };
1499 1551 };
1500 1552 pytest-runner = super.buildPythonPackage {
1501 1553 name = "pytest-runner-2.11.1";
1502 1554 buildInputs = with self; [];
1503 1555 doCheck = false;
1504 1556 propagatedBuildInputs = with self; [];
1505 1557 src = fetchurl {
1506 1558 url = "https://pypi.python.org/packages/9e/4d/08889e5e27a9f5d6096b9ad257f4dea1faabb03c5ded8f665ead448f5d8a/pytest-runner-2.11.1.tar.gz";
1507 1559 md5 = "bdb73eb18eca2727944a2dcf963c5a81";
1508 1560 };
1509 1561 meta = {
1510 1562 license = [ pkgs.lib.licenses.mit ];
1511 1563 };
1512 1564 };
1513 1565 pytest-sugar = super.buildPythonPackage {
1514 1566 name = "pytest-sugar-0.8.0";
1515 1567 buildInputs = with self; [];
1516 1568 doCheck = false;
1517 1569 propagatedBuildInputs = with self; [pytest termcolor];
1518 1570 src = fetchurl {
1519 1571 url = "https://pypi.python.org/packages/a5/b0/b2773dee078f17773a5bf2dfad49b0be57b6354bbd84bbefe4313e509d87/pytest-sugar-0.8.0.tar.gz";
1520 1572 md5 = "8cafbdad648068e0e44b8fc5f9faae42";
1521 1573 };
1522 1574 meta = {
1523 1575 license = [ pkgs.lib.licenses.bsdOriginal ];
1524 1576 };
1525 1577 };
1526 1578 pytest-timeout = super.buildPythonPackage {
1527 1579 name = "pytest-timeout-1.2.0";
1528 1580 buildInputs = with self; [];
1529 1581 doCheck = false;
1530 1582 propagatedBuildInputs = with self; [pytest];
1531 1583 src = fetchurl {
1532 1584 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
1533 1585 md5 = "83607d91aa163562c7ee835da57d061d";
1534 1586 };
1535 1587 meta = {
1536 1588 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1537 1589 };
1538 1590 };
1539 1591 python-dateutil = super.buildPythonPackage {
1540 1592 name = "python-dateutil-2.1";
1541 1593 buildInputs = with self; [];
1542 1594 doCheck = false;
1543 1595 propagatedBuildInputs = with self; [six];
1544 1596 src = fetchurl {
1545 1597 url = "https://pypi.python.org/packages/65/52/9c18dac21f174ad31b65e22d24297864a954e6fe65876eba3f5773d2da43/python-dateutil-2.1.tar.gz";
1546 1598 md5 = "1534bb15cf311f07afaa3aacba1c028b";
1547 1599 };
1548 1600 meta = {
1549 1601 license = [ { fullName = "Simplified BSD"; } ];
1550 1602 };
1551 1603 };
1552 1604 python-editor = super.buildPythonPackage {
1553 1605 name = "python-editor-1.0.3";
1554 1606 buildInputs = with self; [];
1555 1607 doCheck = false;
1556 1608 propagatedBuildInputs = with self; [];
1557 1609 src = fetchurl {
1558 1610 url = "https://pypi.python.org/packages/65/1e/adf6e000ea5dc909aa420352d6ba37f16434c8a3c2fa030445411a1ed545/python-editor-1.0.3.tar.gz";
1559 1611 md5 = "0aca5f2ef176ce68e98a5b7e31372835";
1560 1612 };
1561 1613 meta = {
1562 1614 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1563 1615 };
1564 1616 };
1565 1617 python-ldap = super.buildPythonPackage {
1566 1618 name = "python-ldap-2.4.40";
1567 1619 buildInputs = with self; [];
1568 1620 doCheck = false;
1569 1621 propagatedBuildInputs = with self; [setuptools];
1570 1622 src = fetchurl {
1571 1623 url = "https://pypi.python.org/packages/4a/d8/7d70a7469058a3987d224061a81d778951ac2b48220bdcc511e4b1b37176/python-ldap-2.4.40.tar.gz";
1572 1624 md5 = "aea0233f7d39b0c7549fcd310deeb0e5";
1573 1625 };
1574 1626 meta = {
1575 1627 license = [ pkgs.lib.licenses.psfl ];
1576 1628 };
1577 1629 };
1578 1630 python-memcached = super.buildPythonPackage {
1579 1631 name = "python-memcached-1.58";
1580 1632 buildInputs = with self; [];
1581 1633 doCheck = false;
1582 1634 propagatedBuildInputs = with self; [six];
1583 1635 src = fetchurl {
1584 1636 url = "https://pypi.python.org/packages/f7/62/14b2448cfb04427366f24104c9da97cf8ea380d7258a3233f066a951a8d8/python-memcached-1.58.tar.gz";
1585 1637 md5 = "23b258105013d14d899828d334e6b044";
1586 1638 };
1587 1639 meta = {
1588 1640 license = [ pkgs.lib.licenses.psfl ];
1589 1641 };
1590 1642 };
1591 1643 python-pam = super.buildPythonPackage {
1592 1644 name = "python-pam-1.8.2";
1593 1645 buildInputs = with self; [];
1594 1646 doCheck = false;
1595 1647 propagatedBuildInputs = with self; [];
1596 1648 src = fetchurl {
1597 1649 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1598 1650 md5 = "db71b6b999246fb05d78ecfbe166629d";
1599 1651 };
1600 1652 meta = {
1601 1653 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1602 1654 };
1603 1655 };
1604 1656 pytz = super.buildPythonPackage {
1605 1657 name = "pytz-2015.4";
1606 1658 buildInputs = with self; [];
1607 1659 doCheck = false;
1608 1660 propagatedBuildInputs = with self; [];
1609 1661 src = fetchurl {
1610 1662 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1611 1663 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1612 1664 };
1613 1665 meta = {
1614 1666 license = [ pkgs.lib.licenses.mit ];
1615 1667 };
1616 1668 };
1617 1669 pyzmq = super.buildPythonPackage {
1618 1670 name = "pyzmq-14.6.0";
1619 1671 buildInputs = with self; [];
1620 1672 doCheck = false;
1621 1673 propagatedBuildInputs = with self; [];
1622 1674 src = fetchurl {
1623 1675 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1624 1676 md5 = "395b5de95a931afa5b14c9349a5b8024";
1625 1677 };
1626 1678 meta = {
1627 1679 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1628 1680 };
1629 1681 };
1630 1682 recaptcha-client = super.buildPythonPackage {
1631 1683 name = "recaptcha-client-1.0.6";
1632 1684 buildInputs = with self; [];
1633 1685 doCheck = false;
1634 1686 propagatedBuildInputs = with self; [];
1635 1687 src = fetchurl {
1636 1688 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1637 1689 md5 = "74228180f7e1fb76c4d7089160b0d919";
1638 1690 };
1639 1691 meta = {
1640 1692 license = [ { fullName = "MIT/X11"; } ];
1641 1693 };
1642 1694 };
1643 1695 repoze.lru = super.buildPythonPackage {
1644 1696 name = "repoze.lru-0.6";
1645 1697 buildInputs = with self; [];
1646 1698 doCheck = false;
1647 1699 propagatedBuildInputs = with self; [];
1648 1700 src = fetchurl {
1649 1701 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1650 1702 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1651 1703 };
1652 1704 meta = {
1653 1705 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1654 1706 };
1655 1707 };
1656 1708 requests = super.buildPythonPackage {
1657 1709 name = "requests-2.9.1";
1658 1710 buildInputs = with self; [];
1659 1711 doCheck = false;
1660 1712 propagatedBuildInputs = with self; [];
1661 1713 src = fetchurl {
1662 1714 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1663 1715 md5 = "0b7f480d19012ec52bab78292efd976d";
1664 1716 };
1665 1717 meta = {
1666 1718 license = [ pkgs.lib.licenses.asl20 ];
1667 1719 };
1668 1720 };
1669 1721 rhodecode-enterprise-ce = super.buildPythonPackage {
1670 1722 name = "rhodecode-enterprise-ce-4.9.0";
1671 1723 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
1672 1724 doCheck = true;
1673 1725 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments pygments-markdown-lexer Pylons Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress cssselect celery channelstream colander decorator deform docutils gevent gunicorn infrae.cache ipython iso8601 kombu lxml msgpack-python nbconvert packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson subprocess32 waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1674 1726 src = ./.;
1675 1727 meta = {
1676 1728 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
1677 1729 };
1678 1730 };
1679 1731 rhodecode-tools = super.buildPythonPackage {
1680 1732 name = "rhodecode-tools-0.12.0";
1681 1733 buildInputs = with self; [];
1682 1734 doCheck = false;
1683 1735 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests elasticsearch elasticsearch-dsl urllib3 Whoosh];
1684 1736 src = fetchurl {
1685 1737 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.12.0.tar.gz?md5=9ca040356fa7e38d3f64529a4cffdca4";
1686 1738 md5 = "9ca040356fa7e38d3f64529a4cffdca4";
1687 1739 };
1688 1740 meta = {
1689 1741 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1690 1742 };
1691 1743 };
1692 1744 scandir = super.buildPythonPackage {
1693 1745 name = "scandir-1.5";
1694 1746 buildInputs = with self; [];
1695 1747 doCheck = false;
1696 1748 propagatedBuildInputs = with self; [];
1697 1749 src = fetchurl {
1698 1750 url = "https://pypi.python.org/packages/bd/f4/3143e0289faf0883228017dbc6387a66d0b468df646645e29e1eb89ea10e/scandir-1.5.tar.gz";
1699 1751 md5 = "a2713043de681bba6b084be42e7a8a44";
1700 1752 };
1701 1753 meta = {
1702 1754 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
1703 1755 };
1704 1756 };
1705 1757 setproctitle = super.buildPythonPackage {
1706 1758 name = "setproctitle-1.1.8";
1707 1759 buildInputs = with self; [];
1708 1760 doCheck = false;
1709 1761 propagatedBuildInputs = with self; [];
1710 1762 src = fetchurl {
1711 1763 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1712 1764 md5 = "728f4c8c6031bbe56083a48594027edd";
1713 1765 };
1714 1766 meta = {
1715 1767 license = [ pkgs.lib.licenses.bsdOriginal ];
1716 1768 };
1717 1769 };
1718 1770 setuptools = super.buildPythonPackage {
1719 1771 name = "setuptools-30.1.0";
1720 1772 buildInputs = with self; [];
1721 1773 doCheck = false;
1722 1774 propagatedBuildInputs = with self; [];
1723 1775 src = fetchurl {
1724 1776 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
1725 1777 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
1726 1778 };
1727 1779 meta = {
1728 1780 license = [ pkgs.lib.licenses.mit ];
1729 1781 };
1730 1782 };
1731 1783 setuptools-scm = super.buildPythonPackage {
1732 1784 name = "setuptools-scm-1.15.0";
1733 1785 buildInputs = with self; [];
1734 1786 doCheck = false;
1735 1787 propagatedBuildInputs = with self; [];
1736 1788 src = fetchurl {
1737 1789 url = "https://pypi.python.org/packages/80/b7/31b6ae5fcb188e37f7e31abe75f9be90490a5456a72860fa6e643f8a3cbc/setuptools_scm-1.15.0.tar.gz";
1738 1790 md5 = "b6916c78ed6253d6602444fad4279c5b";
1739 1791 };
1740 1792 meta = {
1741 1793 license = [ pkgs.lib.licenses.mit ];
1742 1794 };
1743 1795 };
1744 1796 simplegeneric = super.buildPythonPackage {
1745 1797 name = "simplegeneric-0.8.1";
1746 1798 buildInputs = with self; [];
1747 1799 doCheck = false;
1748 1800 propagatedBuildInputs = with self; [];
1749 1801 src = fetchurl {
1750 1802 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
1751 1803 md5 = "f9c1fab00fd981be588fc32759f474e3";
1752 1804 };
1753 1805 meta = {
1754 1806 license = [ pkgs.lib.licenses.zpt21 ];
1755 1807 };
1756 1808 };
1757 1809 simplejson = super.buildPythonPackage {
1758 1810 name = "simplejson-3.11.1";
1759 1811 buildInputs = with self; [];
1760 1812 doCheck = false;
1761 1813 propagatedBuildInputs = with self; [];
1762 1814 src = fetchurl {
1763 1815 url = "https://pypi.python.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
1764 1816 md5 = "6e2f1bd5fb0a926facf5d89d217a7183";
1765 1817 };
1766 1818 meta = {
1767 1819 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
1768 1820 };
1769 1821 };
1770 1822 six = super.buildPythonPackage {
1771 1823 name = "six-1.9.0";
1772 1824 buildInputs = with self; [];
1773 1825 doCheck = false;
1774 1826 propagatedBuildInputs = with self; [];
1775 1827 src = fetchurl {
1776 1828 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1777 1829 md5 = "476881ef4012262dfc8adc645ee786c4";
1778 1830 };
1779 1831 meta = {
1780 1832 license = [ pkgs.lib.licenses.mit ];
1781 1833 };
1782 1834 };
1783 1835 subprocess32 = super.buildPythonPackage {
1784 1836 name = "subprocess32-3.2.7";
1785 1837 buildInputs = with self; [];
1786 1838 doCheck = false;
1787 1839 propagatedBuildInputs = with self; [];
1788 1840 src = fetchurl {
1789 1841 url = "https://pypi.python.org/packages/b8/2f/49e53b0d0e94611a2dc624a1ad24d41b6d94d0f1b0a078443407ea2214c2/subprocess32-3.2.7.tar.gz";
1790 1842 md5 = "824c801e479d3e916879aae3e9c15e16";
1791 1843 };
1792 1844 meta = {
1793 1845 license = [ pkgs.lib.licenses.psfl ];
1794 1846 };
1795 1847 };
1796 1848 supervisor = super.buildPythonPackage {
1797 1849 name = "supervisor-3.3.2";
1798 1850 buildInputs = with self; [];
1799 1851 doCheck = false;
1800 1852 propagatedBuildInputs = with self; [meld3];
1801 1853 src = fetchurl {
1802 1854 url = "https://pypi.python.org/packages/7b/17/88adf8cb25f80e2bc0d18e094fcd7ab300632ea00b601cbbbb84c2419eae/supervisor-3.3.2.tar.gz";
1803 1855 md5 = "04766d62864da13d6a12f7429e75314f";
1804 1856 };
1805 1857 meta = {
1806 1858 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1807 1859 };
1808 1860 };
1809 1861 termcolor = super.buildPythonPackage {
1810 1862 name = "termcolor-1.1.0";
1811 1863 buildInputs = with self; [];
1812 1864 doCheck = false;
1813 1865 propagatedBuildInputs = with self; [];
1814 1866 src = fetchurl {
1815 1867 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
1816 1868 md5 = "043e89644f8909d462fbbfa511c768df";
1817 1869 };
1818 1870 meta = {
1819 1871 license = [ pkgs.lib.licenses.mit ];
1820 1872 };
1821 1873 };
1822 1874 testpath = super.buildPythonPackage {
1823 1875 name = "testpath-0.3.1";
1824 1876 buildInputs = with self; [];
1825 1877 doCheck = false;
1826 1878 propagatedBuildInputs = with self; [];
1827 1879 src = fetchurl {
1828 1880 url = "https://pypi.python.org/packages/f4/8b/b71e9ee10e5f751e9d959bc750ab122ba04187f5aa52aabdc4e63b0e31a7/testpath-0.3.1.tar.gz";
1829 1881 md5 = "2cd5ed5522fda781bb497c9d80ae2fc9";
1830 1882 };
1831 1883 meta = {
1832 1884 license = [ pkgs.lib.licenses.mit ];
1833 1885 };
1834 1886 };
1835 1887 traitlets = super.buildPythonPackage {
1836 1888 name = "traitlets-4.3.2";
1837 1889 buildInputs = with self; [];
1838 1890 doCheck = false;
1839 1891 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
1840 1892 src = fetchurl {
1841 1893 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
1842 1894 md5 = "3068663f2f38fd939a9eb3a500ccc154";
1843 1895 };
1844 1896 meta = {
1845 1897 license = [ pkgs.lib.licenses.bsdOriginal ];
1846 1898 };
1847 1899 };
1848 1900 transifex-client = super.buildPythonPackage {
1849 1901 name = "transifex-client-0.10";
1850 1902 buildInputs = with self; [];
1851 1903 doCheck = false;
1852 1904 propagatedBuildInputs = with self; [];
1853 1905 src = fetchurl {
1854 1906 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1855 1907 md5 = "5549538d84b8eede6b254cd81ae024fa";
1856 1908 };
1857 1909 meta = {
1858 1910 license = [ pkgs.lib.licenses.gpl2 ];
1859 1911 };
1860 1912 };
1861 1913 translationstring = super.buildPythonPackage {
1862 1914 name = "translationstring-1.3";
1863 1915 buildInputs = with self; [];
1864 1916 doCheck = false;
1865 1917 propagatedBuildInputs = with self; [];
1866 1918 src = fetchurl {
1867 1919 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1868 1920 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1869 1921 };
1870 1922 meta = {
1871 1923 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1872 1924 };
1873 1925 };
1874 1926 trollius = super.buildPythonPackage {
1875 1927 name = "trollius-1.0.4";
1876 1928 buildInputs = with self; [];
1877 1929 doCheck = false;
1878 1930 propagatedBuildInputs = with self; [futures];
1879 1931 src = fetchurl {
1880 1932 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1881 1933 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1882 1934 };
1883 1935 meta = {
1884 1936 license = [ pkgs.lib.licenses.asl20 ];
1885 1937 };
1886 1938 };
1887 1939 uWSGI = super.buildPythonPackage {
1888 1940 name = "uWSGI-2.0.15";
1889 1941 buildInputs = with self; [];
1890 1942 doCheck = false;
1891 1943 propagatedBuildInputs = with self; [];
1892 1944 src = fetchurl {
1893 1945 url = "https://pypi.python.org/packages/bb/0a/45e5aa80dc135889594bb371c082d20fb7ee7303b174874c996888cc8511/uwsgi-2.0.15.tar.gz";
1894 1946 md5 = "fc50bd9e83b7602fa474b032167010a7";
1895 1947 };
1896 1948 meta = {
1897 1949 license = [ pkgs.lib.licenses.gpl2 ];
1898 1950 };
1899 1951 };
1900 1952 urllib3 = super.buildPythonPackage {
1901 1953 name = "urllib3-1.16";
1902 1954 buildInputs = with self; [];
1903 1955 doCheck = false;
1904 1956 propagatedBuildInputs = with self; [];
1905 1957 src = fetchurl {
1906 1958 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1907 1959 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1908 1960 };
1909 1961 meta = {
1910 1962 license = [ pkgs.lib.licenses.mit ];
1911 1963 };
1912 1964 };
1913 1965 venusian = super.buildPythonPackage {
1914 1966 name = "venusian-1.1.0";
1915 1967 buildInputs = with self; [];
1916 1968 doCheck = false;
1917 1969 propagatedBuildInputs = with self; [];
1918 1970 src = fetchurl {
1919 1971 url = "https://pypi.python.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
1920 1972 md5 = "56bc5e6756e4bda37bcdb94f74a72b8f";
1921 1973 };
1922 1974 meta = {
1923 1975 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1924 1976 };
1925 1977 };
1926 1978 waitress = super.buildPythonPackage {
1927 1979 name = "waitress-1.0.2";
1928 1980 buildInputs = with self; [];
1929 1981 doCheck = false;
1930 1982 propagatedBuildInputs = with self; [];
1931 1983 src = fetchurl {
1932 1984 url = "https://pypi.python.org/packages/cd/f4/400d00863afa1e03618e31fd7e2092479a71b8c9718b00eb1eeb603746c6/waitress-1.0.2.tar.gz";
1933 1985 md5 = "b968f39e95d609f6194c6e50425d4bb7";
1934 1986 };
1935 1987 meta = {
1936 1988 license = [ pkgs.lib.licenses.zpt21 ];
1937 1989 };
1938 1990 };
1939 1991 wcwidth = super.buildPythonPackage {
1940 1992 name = "wcwidth-0.1.7";
1941 1993 buildInputs = with self; [];
1942 1994 doCheck = false;
1943 1995 propagatedBuildInputs = with self; [];
1944 1996 src = fetchurl {
1945 1997 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
1946 1998 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
1947 1999 };
1948 2000 meta = {
1949 2001 license = [ pkgs.lib.licenses.mit ];
1950 2002 };
1951 2003 };
1952 2004 ws4py = super.buildPythonPackage {
1953 2005 name = "ws4py-0.3.5";
1954 2006 buildInputs = with self; [];
1955 2007 doCheck = false;
1956 2008 propagatedBuildInputs = with self; [];
1957 2009 src = fetchurl {
1958 2010 url = "https://pypi.python.org/packages/b6/4f/34af703be86939629479e74d6e650e39f3bd73b3b09212c34e5125764cbc/ws4py-0.3.5.zip";
1959 2011 md5 = "a261b75c20b980e55ce7451a3576a867";
1960 2012 };
1961 2013 meta = {
1962 2014 license = [ pkgs.lib.licenses.bsdOriginal ];
1963 2015 };
1964 2016 };
1965 2017 wsgiref = super.buildPythonPackage {
1966 2018 name = "wsgiref-0.1.2";
1967 2019 buildInputs = with self; [];
1968 2020 doCheck = false;
1969 2021 propagatedBuildInputs = with self; [];
1970 2022 src = fetchurl {
1971 2023 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1972 2024 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1973 2025 };
1974 2026 meta = {
1975 2027 license = [ { fullName = "PSF or ZPL"; } ];
1976 2028 };
1977 2029 };
1978 2030 zope.cachedescriptors = super.buildPythonPackage {
1979 2031 name = "zope.cachedescriptors-4.0.0";
1980 2032 buildInputs = with self; [];
1981 2033 doCheck = false;
1982 2034 propagatedBuildInputs = with self; [setuptools];
1983 2035 src = fetchurl {
1984 2036 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1985 2037 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1986 2038 };
1987 2039 meta = {
1988 2040 license = [ pkgs.lib.licenses.zpt21 ];
1989 2041 };
1990 2042 };
1991 2043 zope.deprecation = super.buildPythonPackage {
1992 2044 name = "zope.deprecation-4.1.2";
1993 2045 buildInputs = with self; [];
1994 2046 doCheck = false;
1995 2047 propagatedBuildInputs = with self; [setuptools];
1996 2048 src = fetchurl {
1997 2049 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1998 2050 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1999 2051 };
2000 2052 meta = {
2001 2053 license = [ pkgs.lib.licenses.zpt21 ];
2002 2054 };
2003 2055 };
2004 2056 zope.event = super.buildPythonPackage {
2005 2057 name = "zope.event-4.0.3";
2006 2058 buildInputs = with self; [];
2007 2059 doCheck = false;
2008 2060 propagatedBuildInputs = with self; [setuptools];
2009 2061 src = fetchurl {
2010 2062 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
2011 2063 md5 = "9a3780916332b18b8b85f522bcc3e249";
2012 2064 };
2013 2065 meta = {
2014 2066 license = [ pkgs.lib.licenses.zpt21 ];
2015 2067 };
2016 2068 };
2017 2069 zope.interface = super.buildPythonPackage {
2018 2070 name = "zope.interface-4.1.3";
2019 2071 buildInputs = with self; [];
2020 2072 doCheck = false;
2021 2073 propagatedBuildInputs = with self; [setuptools];
2022 2074 src = fetchurl {
2023 2075 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
2024 2076 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
2025 2077 };
2026 2078 meta = {
2027 2079 license = [ pkgs.lib.licenses.zpt21 ];
2028 2080 };
2029 2081 };
2030 2082
2031 2083 ### Test requirements
2032 2084
2033 2085
2034 2086 }
@@ -1,136 +1,136 b''
1 1 ## core
2 2 setuptools==30.1.0
3 3 setuptools-scm==1.15.0
4 4
5 5 amqplib==1.0.2
6 6 anyjson==0.3.3
7 7 authomatic==0.1.0.post1
8 8 Babel==1.3
9 9 backport-ipaddress==0.1
10 10 Beaker==1.9.0
11 11 celery==2.2.10
12 12 Chameleon==2.24
13 13 channelstream==0.5.2
14 14 click==5.1
15 15 colander==1.3.3
16 16 configobj==5.0.6
17 17 cssselect==1.0.1
18 18 decorator==4.0.11
19 19 deform==2.0.4
20 20 docutils==0.13.1
21 21 dogpile.cache==0.6.4
22 22 dogpile.core==0.4.1
23 23 ecdsa==0.11
24 24 FormEncode==1.2.4
25 25 future==0.14.3
26 26 futures==3.0.2
27 27 gnureadline==6.3.3
28 28 infrae.cache==1.0.1
29 29 iso8601==0.1.11
30 30 itsdangerous==0.24
31 31 Jinja2==2.7.3
32 32 kombu==1.5.1
33 33 lxml==3.7.3
34 34 Mako==1.0.6
35 35 Markdown==2.6.8
36 36 MarkupSafe==0.23
37 37 meld3==1.0.2
38 38 msgpack-python==0.4.8
39 39 MySQL-python==1.2.5
40 40 nose==1.3.6
41 41 objgraph==3.1.0
42 42 packaging==15.2
43 43 paramiko==1.15.1
44 44 Paste==2.0.3
45 45 PasteDeploy==1.5.2
46 46 PasteScript==1.7.5
47 47 pathlib2==2.3.0
48 48 psutil==4.3.1
49 49 psycopg2==2.7.1
50 50 py-bcrypt==0.4
51 51 pycrypto==2.6.1
52 52 pycurl==7.19.5
53 53 pyflakes==0.8.1
54 54 pygments-markdown-lexer==0.1.0.dev39
55 55 Pygments==2.2.0
56 56 pyparsing==1.5.7
57 57 pyramid-beaker==0.8
58 58 pyramid-debugtoolbar==3.0.5
59 59 pyramid-jinja2==2.5
60 60 pyramid-mako==1.0.2
61 pyramid==1.7.4
61 pyramid==1.9.0
62 62 pysqlite==2.8.3
63 63 python-dateutil==2.1
64 64 python-ldap==2.4.40
65 65 python-memcached==1.58
66 66 python-pam==1.8.2
67 67 pytz==2015.4
68 68 pyzmq==14.6.0
69 69 recaptcha-client==1.0.6
70 70 repoze.lru==0.6
71 71 requests==2.9.1
72 72 Routes==1.13
73 73 setproctitle==1.1.8
74 74 simplejson==3.11.1
75 75 six==1.9.0
76 76 Sphinx==1.2.2
77 77 SQLAlchemy==0.9.9
78 78 subprocess32==3.2.7
79 79 supervisor==3.3.2
80 80 Tempita==0.5.2
81 81 translationstring==1.3
82 82 trollius==1.0.4
83 83 urllib3==1.16
84 84 URLObject==2.4.0
85 85 venusian==1.1.0
86 86 WebError==0.10.3
87 87 WebHelpers2==2.0
88 88 WebHelpers==1.3
89 WebOb==1.3.1
89 WebOb==1.7.3
90 90 Whoosh==2.7.4
91 91 wsgiref==0.1.2
92 92 zope.cachedescriptors==4.0.0
93 93 zope.deprecation==4.1.2
94 94 zope.event==4.0.3
95 95 zope.interface==4.1.3
96 96
97 97 ## customized/patched libs
98 98 # our patched version of Pylons==1.0.2
99 99 https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f#egg=Pylons==1.0.2.rhodecode-patch-1
100 100 # not released py-gfm==0.1.3
101 101 https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16#egg=py-gfm==0.1.3.rhodecode-upstream1
102 102
103 103 # IPYTHON RENDERING
104 104 # entrypoints backport, pypi version doesn't support egg installs
105 105 https://code.rhodecode.com/upstream/entrypoints/archive/96e6d645684e1af3d7df5b5272f3fe85a546b233.tar.gz?md5=7db37771aea9ac9fefe093e5d6987313#egg=entrypoints==0.2.2.rhodecode-upstream1
106 106 nbconvert==5.1.1
107 107 nbformat==4.3.0
108 108 jupyter_client==5.0.0
109 109
110 110 ## cli tools
111 111 alembic==0.9.2
112 112 invoke==0.13.0
113 113 bumpversion==0.5.3
114 114 transifex-client==0.10
115 115
116 116 ## http servers
117 117 gevent==1.2.2
118 118 greenlet==0.4.12
119 119 gunicorn==19.7.1
120 120 waitress==1.0.2
121 121 uWSGI==2.0.15
122 122
123 123 ## debug
124 124 ipdb==0.10.3
125 125 ipython==5.1.0
126 126 CProfileV==1.0.7
127 127 bottle==0.12.8
128 128
129 129 ## rhodecode-tools, special case
130 130 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.12.0.tar.gz?md5=9ca040356fa7e38d3f64529a4cffdca4#egg=rhodecode-tools==0.12.0
131 131
132 132 ## appenlight
133 133 appenlight-client==0.6.21
134 134
135 135 ## test related requirements
136 136 -r requirements_test.txt
@@ -1,15 +1,15 b''
1 1 # test related requirements
2 2 pytest==3.1.2
3 3 py==1.4.34
4 4 pytest-cov==2.5.1
5 5 pytest-sugar==0.8.0
6 6 pytest-runner==2.11.1
7 7 pytest-catchlog==1.2.2
8 8 pytest-profiling==1.2.6
9 9 gprof2dot==2016.10.13
10 10 pytest-timeout==1.2.0
11 11
12 12 mock==1.0.1
13 WebTest==1.4.3
13 WebTest==2.0.27
14 14 cov-core==1.15.0
15 15 coverage==3.7.1
@@ -1,533 +1,525 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Pylons middleware initialization
23 23 """
24 24 import logging
25 25 from collections import OrderedDict
26 26
27 27 from paste.registry import RegistryManager
28 28 from paste.gzipper import make_gzip_middleware
29 29 from pylons.wsgiapp import PylonsApp
30 30 from pyramid.authorization import ACLAuthorizationPolicy
31 31 from pyramid.config import Configurator
32 32 from pyramid.settings import asbool, aslist
33 33 from pyramid.wsgi import wsgiapp
34 34 from pyramid.httpexceptions import (
35 35 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound)
36 36 from pyramid.events import ApplicationCreated
37 37 from pyramid.renderers import render_to_response
38 38 from routes.middleware import RoutesMiddleware
39 39 import routes.util
40 40
41 41 import rhodecode
42 42
43 43 from rhodecode.model import meta
44 44 from rhodecode.config import patches
45 45 from rhodecode.config.routing import STATIC_FILE_PREFIX
46 46 from rhodecode.config.environment import (
47 47 load_environment, load_pyramid_environment)
48 48
49 49 from rhodecode.lib.vcs import VCSCommunicationError
50 50 from rhodecode.lib.exceptions import VCSServerUnavailable
51 51 from rhodecode.lib.middleware import csrf
52 52 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
53 53 from rhodecode.lib.middleware.error_handling import (
54 54 PylonsErrorHandlingMiddleware)
55 55 from rhodecode.lib.middleware.https_fixup import HttpsFixup
56 56 from rhodecode.lib.middleware.vcs import VCSMiddleware
57 57 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
58 58 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
59 59 from rhodecode.subscribers import (
60 60 scan_repositories_if_enabled, write_js_routes_if_enabled,
61 61 write_metadata_if_needed)
62 62
63 63
64 64 log = logging.getLogger(__name__)
65 65
66 66
# this is used to avoid the route lookup overhead in routesmiddleware
# for certain routes which won't go to pylons - eg. static files, debugger
# it is only needed for the pylons migration and can be removed once complete
class SkippableRoutesMiddleware(RoutesMiddleware):
    """ Routes middleware that allows you to skip prefixes

    Requests whose PATH_INFO starts with one of ``skip_prefixes`` bypass
    route matching entirely and are handed straight to the wrapped app.
    """

    def __init__(self, *args, **kw):
        # `skip_prefixes` is our own keyword; pop it before delegating so
        # the parent RoutesMiddleware does not see an unexpected argument.
        self.skip_prefixes = kw.pop('skip_prefixes', [])
        super(SkippableRoutesMiddleware, self).__init__(*args, **kw)

    def __call__(self, environ, start_response):
        for prefix in self.skip_prefixes:
            if environ['PATH_INFO'].startswith(prefix):
                # added to avoid the case when a missing /_static route falls
                # through to pylons and causes an exception as pylons is
                # expecting 'wsgiorg.routing_args' to be set in the environ
                # by RoutesMiddleware.
                if 'wsgiorg.routing_args' not in environ:
                    environ['wsgiorg.routing_args'] = (None, {})
                return self.app(environ, start_response)

        return super(SkippableRoutesMiddleware, self).__call__(
            environ, start_response)
90 90
91 91
def make_app(global_conf, static_files=True, **app_conf):
    """Create a Pylons WSGI application and return it

    ``global_conf``
        The inherited configuration for this application. Normally from
        the [DEFAULT] section of the Paste ini file.

    ``app_conf``
        The application's local configuration. Normally specified in
        the [app:<name>] section of the Paste ini file (where <name>
        defaults to main).

    """
    # Apply compatibility patches
    patches.kombu_1_5_1_python_2_7_11()
    patches.inspect_getargspec()

    # Configure the Pylons environment
    config = load_environment(global_conf, app_conf)

    # The Pylons WSGI app
    app = PylonsApp(config=config)

    # Establish the Registry for this application
    app = RegistryManager(app)

    # expose the pylons config on the returned app so callers (e.g.
    # `make_not_found_view`) can read it back later
    app.config = config

    return app
129 121
130 122
def make_pyramid_app(global_config, **settings):
    """
    Constructs the WSGI application based on Pyramid and wraps the Pylons based
    application.

    Specials:

    * We migrate from Pylons to Pyramid. While doing this, we keep both
      frameworks functional. This involves moving some WSGI middlewares around
      and providing access to some data internals, so that the old code is
      still functional.

    * The application can also be integrated like a plugin via the call to
      `includeme`. This is accompanied with the other utility functions which
      are called. Changing this should be done with great care to not break
      cases when these fragments are assembled from another place.

    """
    # The edition string should be available in pylons too, so we add it here
    # before copying the settings.
    settings.setdefault('rhodecode.edition', 'Community Edition')

    # As long as our Pylons application does expect "unprepared" settings, make
    # sure that we keep an unmodified copy. This avoids unintentional change of
    # behavior in the old application.
    settings_pylons = settings.copy()

    # Only the pyramid copy of the settings gets defaults + type conversion.
    sanitize_settings_and_apply_defaults(settings)
    config = Configurator(settings=settings)
    add_pylons_compat_data(config.registry, global_config, settings_pylons)

    load_pyramid_environment(global_config, settings)

    includeme_first(config)
    includeme(config)
    pyramid_app = config.make_wsgi_app()
    pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
    pyramid_app.config = config

    # creating the app uses a connection - return it after we are done
    meta.Session.remove()

    return pyramid_app
174 166
175 167
def make_not_found_view(config):
    """
    This creates the view which should be registered as not-found-view to
    pyramid. Basically it consists of the old pylons app, converted to a view.
    Additionally it is wrapped by some other middlewares.
    """
    settings = config.registry.settings
    vcs_server_enabled = settings['vcs.server.enable']

    # Make pylons app from unprepared settings.
    pylons_app = make_app(
        config.registry._pylons_compat_global_config,
        **config.registry._pylons_compat_settings)
    config.registry._pylons_compat_config = pylons_app.config

    # Appenlight monitoring.
    pylons_app, appenlight_client = wrap_in_appenlight_if_enabled(
        pylons_app, settings)

    # The pylons app is executed inside of the pyramid 404 exception handler.
    # Exceptions which are raised inside of it are not handled by pyramid
    # again. Therefore we add a middleware that invokes the error handler in
    # case of an exception or error response. This way we return proper error
    # HTML pages in case of an error.
    reraise = (settings.get('debugtoolbar.enabled', False) or
               rhodecode.disable_error_handler)
    pylons_app = PylonsErrorHandlingMiddleware(
        pylons_app, error_handler, reraise)

    # The VCSMiddleware shall operate like a fallback if pyramid doesn't find a
    # view to handle the request. Therefore it is wrapped around the pylons
    # app. It has to be outside of the error handling otherwise error responses
    # from the vcsserver are converted to HTML error pages. This confuses the
    # command line tools and the user won't get a meaningful error message.
    if vcs_server_enabled:
        pylons_app = VCSMiddleware(
            pylons_app, settings, appenlight_client, registry=config.registry)

    # Convert WSGI app to pyramid view and return it.
    return wsgiapp(pylons_app)
216 208
217 209
def add_pylons_compat_data(registry, global_config, settings):
    """
    Attach data to the registry to support the Pylons integration.

    The raw (unsanitized) global config and settings are stashed on the
    registry so that the Pylons app can later be built from them.
    """
    compat_data = (
        ('_pylons_compat_global_config', global_config),
        ('_pylons_compat_settings', settings),
    )
    for attr_name, attr_value in compat_data:
        setattr(registry, attr_name, attr_value)
224 216
225 217
def error_handler(exception, request):
    """
    Pyramid exception view: renders the error_document template for the
    given exception and returns it as the response.
    """
    import rhodecode
    from rhodecode.lib import helpers

    rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'

    base_response = HTTPInternalServerError()
    # prefer original exception for the response since it may have headers set
    if isinstance(exception, HTTPException):
        base_response = exception
    elif isinstance(exception, VCSCommunicationError):
        base_response = VCSServerUnavailable()

    def is_http_error(response):
        # error which should have traceback
        return response.status_code > 499

    if is_http_error(base_response):
        log.exception(
            'error occurred handling this request for path: %s', request.path)

    # template context passed to error_document.mako
    c = AttributeDict()
    c.error_message = base_response.status
    c.error_explanation = base_response.explanation or str(base_response)
    c.visual = AttributeDict()

    c.visual.rhodecode_support_url = (
        request.registry.settings.get('rhodecode_support_url') or
        request.route_url('rhodecode_support')
    )
    c.redirect_time = 0
    c.rhodecode_name = rhodecode_title
    if not c.rhodecode_name:
        c.rhodecode_name = 'Rhodecode'

    # some responses (see base_response above) may carry a list of causes
    c.causes = []
    if hasattr(base_response, 'causes'):
        c.causes = base_response.causes
    c.messages = helpers.flash.pop_messages()

    response = render_to_response(
        '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
        response=base_response)

    return response
271 263
272 264
def includeme(config):
    """
    Main pyramid configuration: registers includes, apps, routes, renderers,
    subscribers, the authorization policy and the pylons fallback view.
    """
    settings = config.registry.settings

    # plugin information
    config.registry.rhodecode_plugins = OrderedDict()

    config.add_directive(
        'register_rhodecode_plugin', register_rhodecode_plugin)

    if asbool(settings.get('appenlight', 'false')):
        config.include('appenlight_client.ext.pyramid_tween')

    # Includes which are required. The application would fail without them.
    config.include('pyramid_mako')
    config.include('pyramid_beaker')

    config.include('rhodecode.authentication')
    config.include('rhodecode.integrations')

    # apps
    config.include('rhodecode.apps._base')
    config.include('rhodecode.apps.ops')

    config.include('rhodecode.apps.admin')
    config.include('rhodecode.apps.channelstream')
    config.include('rhodecode.apps.login')
    config.include('rhodecode.apps.home')
    config.include('rhodecode.apps.repository')
    config.include('rhodecode.apps.repo_group')
    config.include('rhodecode.apps.search')
    config.include('rhodecode.apps.user_profile')
    config.include('rhodecode.apps.my_account')
    config.include('rhodecode.apps.svn_support')
    config.include('rhodecode.apps.gist')

    config.include('rhodecode.apps.debug_style')
    config.include('rhodecode.tweens')
    config.include('rhodecode.api')

    # external (static=True) route used for link generation in templates
    config.add_route(
        'rhodecode_support', 'https://rhodecode.com/help/', static=True)

    config.add_translation_dirs('rhodecode:i18n/')
    settings['default_locale_name'] = settings.get('lang', 'en')

    # Add subscribers.
    config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
    config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
    config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)

    config.add_request_method(
        'rhodecode.lib.partial_renderer.get_partial_renderer',
        'get_partial_renderer')

    # events
    # TODO(marcink): this should be done when pyramid migration is finished
    # config.add_subscriber(
    #     'rhodecode.integrations.integrations_event_handler',
    #     'rhodecode.events.RhodecodeEvent')

    # Set the authorization policy.
    authz_policy = ACLAuthorizationPolicy()
    config.set_authorization_policy(authz_policy)

    # Set the default renderer for HTML templates to mako.
    config.add_mako_renderer('.html')

    config.add_renderer(
        name='json_ext',
        factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')

    # include RhodeCode plugins
    includes = aslist(settings.get('rhodecode.includes', []))
    for inc in includes:
        config.include(inc)

    # This is the glue which allows us to migrate in chunks. By registering the
    # pylons based application as the "Not Found" view in Pyramid, we will
    # fallback to the old application each time the new one does not yet know
    # how to handle a request.
    config.add_notfound_view(make_not_found_view(config))

    if not settings.get('debugtoolbar.enabled', False):
        # if no toolbar, then any exception gets caught and rendered
        config.add_view(error_handler, context=Exception)

    config.add_view(error_handler, context=HTTPError)
361 353
def includeme_first(config):
    """
    Configuration which must be registered before `includeme`: favicon and
    robots.txt redirects plus the static asset views.
    """
    # redirect automatic browser favicon.ico requests to correct place
    def favicon_redirect(context, request):
        return HTTPFound(
            request.static_path('rhodecode:public/images/favicon.ico'))

    config.add_view(favicon_redirect, route_name='favicon')
    config.add_route('favicon', '/favicon.ico')

    # redirect /robots.txt requests to the bundled static file
    def robots_redirect(context, request):
        return HTTPFound(
            request.static_path('rhodecode:public/robots.txt'))

    config.add_view(robots_redirect, route_name='robots')
    config.add_route('robots', '/robots.txt')

    config.add_static_view(
        '_static/deform', 'deform:static')
    config.add_static_view(
        '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
383 375
def wrap_app_in_wsgi_middlewares(pyramid_app, config):
    """
    Apply outer WSGI middlewares around the application.

    Part of this has been moved up from the Pylons layer, so that the
    data is also available if old Pylons code is hit through an already ported
    view.
    """
    settings = config.registry.settings

    # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
    pyramid_app = HttpsFixup(pyramid_app, settings)

    # Add RoutesMiddleware to support the pylons compatibility tween during
    # migration to pyramid.
    pyramid_app = SkippableRoutesMiddleware(
        pyramid_app, config.registry._pylons_compat_config['routes.map'],
        skip_prefixes=(STATIC_FILE_PREFIX, '/_debug_toolbar'))

    pyramid_app, _ = wrap_in_appenlight_if_enabled(pyramid_app, settings)

    if settings['gzip_responses']:
        pyramid_app = make_gzip_middleware(
            pyramid_app, settings, compress_level=1)

    # this should be the outer most middleware in the wsgi stack since
    # middleware like Routes make database calls
    def pyramid_app_with_cleanup(environ, start_response):
        try:
            return pyramid_app(environ, start_response)
        finally:
            # Dispose current database session and rollback uncommitted
            # transactions.
            meta.Session.remove()

            # In a single threaded mode server, on non sqlite db we should have
            # '0 Current Checked out connections' at the end of a request,
            # if not, then something, somewhere is leaving a connection open
            pool = meta.Base.metadata.bind.engine.pool
            log.debug('sa pool status: %s', pool.status())

    return pyramid_app_with_cleanup
426 418
427 419
def sanitize_settings_and_apply_defaults(settings):
    """
    Applies settings defaults and does all type conversion.

    We would move all settings parsing and preparation into this place, so that
    we have only one place left which deals with this part. The remaining parts
    of the application would start to rely fully on well prepared settings.

    This piece would later be split up per topic to avoid a big fat monster
    function.

    :param settings: mutable settings dict, modified in place and returned.
    """

    # Pyramid's mako renderer has to search in the templates folder so that the
    # old templates still work. Ported and new templates are expected to use
    # real asset specifications for the includes.
    mako_directories = settings.setdefault('mako.directories', [
        # Base templates of the original Pylons application
        'rhodecode:templates',
    ])
    log.debug(
        "Using the following Mako template directories: %s",
        mako_directories)

    # Default includes, possible to change as a user
    pyramid_includes = settings.setdefault('pyramid.includes', [
        'rhodecode.lib.middleware.request_wrapper',
    ])
    log.debug(
        "Using the following pyramid.includes: %s",
        pyramid_includes)

    # TODO: johbo: Re-think this, usually the call to config.include
    # should allow to pass in a prefix.
    settings.setdefault('rhodecode.api.url', '/_admin/api')

    # Sanitize generic settings.
    _list_setting(settings, 'default_encoding', 'UTF-8')
    _bool_setting(settings, 'is_test', 'false')
    _bool_setting(settings, 'gzip_responses', 'false')

    # Call split out functions that sanitize settings for each topic.
    _sanitize_appenlight_settings(settings)
    _sanitize_vcs_settings(settings)

    return settings
473 465
474 466
def _sanitize_appenlight_settings(settings):
    # Coerce the appenlight toggle to a real boolean; defaults to off.
    _bool_setting(settings, 'appenlight', 'false')
477 469
478 470
def _sanitize_vcs_settings(settings):
    """
    Applies settings defaults and does type conversion for all VCS related
    settings.
    """
    _string_setting(settings, 'vcs.svn.compatible_version', '')
    _string_setting(settings, 'git_rev_filter', '--all')
    _string_setting(settings, 'vcs.hooks.protocol', 'http')
    _string_setting(settings, 'vcs.scm_app_implementation', 'http')
    _string_setting(settings, 'vcs.server', '')
    _string_setting(settings, 'vcs.server.log_level', 'debug')
    _string_setting(settings, 'vcs.server.protocol', 'http')
    _bool_setting(settings, 'startup.import_repos', 'false')
    _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
    _bool_setting(settings, 'vcs.server.enable', 'true')
    _bool_setting(settings, 'vcs.start_server', 'false')
    _list_setting(settings, 'vcs.backends', 'hg, git, svn')
    _int_setting(settings, 'vcs.connection_timeout', 3600)

    # Support legacy values of vcs.scm_app_implementation. Legacy
    # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
    # which is now mapped to 'http'.
    scm_app_impl = settings['vcs.scm_app_implementation']
    if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
        settings['vcs.scm_app_implementation'] = 'http'
504 496
505 497
506 498 def _int_setting(settings, name, default):
507 499 settings[name] = int(settings.get(name, default))
508 500
509 501
def _bool_setting(settings, name, default):
    """Coerce ``settings[name]`` to a boolean via pyramid's ``asbool``.

    Unicode values are first encoded to utf8 bytestrings, since ``asbool``
    expects a native string under Python 2.
    """
    # renamed local from `input` to avoid shadowing the builtin
    value = settings.get(name, default)
    if isinstance(value, unicode):
        value = value.encode('utf8')
    settings[name] = asbool(value)
515 507
516 508
def _list_setting(settings, name, default):
    """Coerce ``settings[name]`` to a list, supporting both comma and
    pyramid-style space/newline separated values."""
    raw_value = settings.get(name, default)

    old_separator = ','
    if old_separator in raw_value:
        # If we get a comma separated list, pass it to our own function.
        settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
    else:
        # Otherwise we assume it uses pyramids space/newline separation.
        settings[name] = aslist(raw_value)
527 519
528 520
529 521 def _string_setting(settings, name, default, lower=True):
530 522 value = settings.get(name, default)
531 523 if lower:
532 524 value = value.lower()
533 525 settings[name] = value
@@ -1,2023 +1,2026 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 authentication and permission libraries
23 23 """
24 24
25 25 import os
26 26 import inspect
27 27 import collections
28 28 import fnmatch
29 29 import hashlib
30 30 import itertools
31 31 import logging
32 32 import random
33 33 import traceback
34 34 from functools import wraps
35 35
36 36 import ipaddress
37 37 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
38 38 from pylons.i18n.translation import _
39 39 # NOTE(marcink): this has to be removed only after pyramid migration,
40 40 # replace with _ = request.translate
41 41 from sqlalchemy.orm.exc import ObjectDeletedError
42 42 from sqlalchemy.orm import joinedload
43 43 from zope.cachedescriptors.property import Lazy as LazyProperty
44 44
45 45 import rhodecode
46 46 from rhodecode.model import meta
47 47 from rhodecode.model.meta import Session
48 48 from rhodecode.model.user import UserModel
49 49 from rhodecode.model.db import (
50 50 User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
51 51 UserIpMap, UserApiKeys, RepoGroup)
52 52 from rhodecode.lib import caches
53 53 from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5
54 54 from rhodecode.lib.utils import (
55 55 get_repo_slug, get_repo_group_slug, get_user_group_slug)
56 56 from rhodecode.lib.caching_query import FromCache
57 57
58 58
59 59 if rhodecode.is_unix:
60 60 import bcrypt
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64 csrf_token_key = "csrf_token"
65 65
66 66
class PasswordGenerator(object):
    """
    This is a simple class for generating password from different sets of
    characters
    usage::

        passwd_gen = PasswordGenerator()
        # print an 8-letter password containing only big and small letters
        # of the alphabet
        passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
    """
    # character pools that can be passed as `type_` to gen_password
    ALPHABETS_NUM = r'''1234567890'''
    ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
    ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
    ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
    ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
        + ALPHABETS_NUM + ALPHABETS_SPECIAL
    ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
    ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
    ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
    ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM

    def __init__(self, passwd=''):
        # last generated password is kept on the instance
        self.passwd = passwd

    def gen_password(self, length, type_=None):
        """Generate and return a password of `length` chars drawn from `type_`
        (defaults to ALPHABETS_FULL).

        NOTE(review): uses the non-cryptographic `random` module; consider
        `secrets`/`os.urandom` if these passwords are security sensitive.
        """
        if type_ is None:
            type_ = self.ALPHABETS_FULL
        self.passwd = ''.join([random.choice(type_) for _ in xrange(length)])
        return self.passwd
97 97
98 98
99 99 class _RhodeCodeCryptoBase(object):
100 100 ENC_PREF = None
101 101
102 102 def hash_create(self, str_):
103 103 """
104 104 hash the string using
105 105
106 106 :param str_: password to hash
107 107 """
108 108 raise NotImplementedError
109 109
110 110 def hash_check_with_upgrade(self, password, hashed):
111 111 """
112 112 Returns tuple in which first element is boolean that states that
113 113 given password matches it's hashed version, and the second is new hash
114 114 of the password, in case this password should be migrated to new
115 115 cipher.
116 116 """
117 117 checked_hash = self.hash_check(password, hashed)
118 118 return checked_hash, None
119 119
120 120 def hash_check(self, password, hashed):
121 121 """
122 122 Checks matching password with it's hashed value.
123 123
124 124 :param password: password
125 125 :param hashed: password in hashed form
126 126 """
127 127 raise NotImplementedError
128 128
129 129 def _assert_bytes(self, value):
130 130 """
131 131 Passing in an `unicode` object can lead to hard to detect issues
132 132 if passwords contain non-ascii characters. Doing a type check
133 133 during runtime, so that such mistakes are detected early on.
134 134 """
135 135 if not isinstance(value, str):
136 136 raise TypeError(
137 137 "Bytestring required as input, got %r." % (value, ))
138 138
139 139
class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
    """bcrypt backend; the production password hasher (see `crypto_backend`)."""

    # known prefixes of bcrypt hashes with cost factor 10
    ENC_PREF = ('$2a$10', '$2b$10')

    def hash_create(self, str_):
        # bcrypt with a freshly generated salt, cost factor 10
        self._assert_bytes(str_)
        return bcrypt.hashpw(str_, bcrypt.gensalt(10))

    def hash_check_with_upgrade(self, password, hashed):
        """
        Returns tuple in which first element is boolean that states that
        given password matches it's hashed version, and the second is new hash
        of the password, in case this password should be migrated to new
        cipher.

        This implements special upgrade logic which works like that:
         - check if the given password == bcrypted hash, if yes then we
           properly used password and it was already in bcrypt. Proceed
           without any changes
         - if bcrypt hash check is not working try with sha256. If hash compare
           is ok, it means we using correct but old hashed password. indicate
           hash change and proceed
        """

        new_hash = None

        # regular pw check
        password_match_bcrypt = self.hash_check(password, hashed)

        # now we want to know if the password was maybe from sha256
        # basically calling _RhodeCodeCryptoSha256().hash_check()
        if not password_match_bcrypt:
            if _RhodeCodeCryptoSha256().hash_check(password, hashed):
                new_hash = self.hash_create(password)  # make new bcrypt hash
                password_match_bcrypt = True

        return password_match_bcrypt, new_hash

    def hash_check(self, password, hashed):
        """
        Checks matching password with it's hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        try:
            return bcrypt.hashpw(password, hashed) == hashed
        except ValueError as e:
            # we're having a invalid salt here probably, we should not crash
            # just return with False as it would be a wrong password.
            log.debug('Failed to check password hash using bcrypt %s',
                      safe_str(e))

        return False
194 194
195 195
class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
    """Legacy sha256 backend, kept so old hashes can still be verified."""

    ENC_PREF = '_'

    def hash_create(self, str_):
        self._assert_bytes(str_)
        digest = hashlib.sha256(str_)
        return digest.hexdigest()

    def hash_check(self, password, hashed):
        """
        Checks matching password with it's hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        candidate = hashlib.sha256(password).hexdigest()
        return candidate == hashed
212 212
213 213
class _RhodeCodeCryptoMd5(_RhodeCodeCryptoBase):
    """Fast md5 backend, used only during test runs (see `crypto_backend`)."""

    ENC_PREF = '_'

    def hash_create(self, str_):
        self._assert_bytes(str_)
        digest = hashlib.md5(str_)
        return digest.hexdigest()

    def hash_check(self, password, hashed):
        """
        Checks matching password with it's hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        candidate = hashlib.md5(password).hexdigest()
        return candidate == hashed
230 230
231 231
def crypto_backend():
    """
    Return the matching crypto backend.

    Selection is based on if we run tests or not, we pick md5 backend to run
    tests faster since BCRYPT is expensive to calculate
    """
    if rhodecode.is_test:
        RhodeCodeCrypto = _RhodeCodeCryptoMd5()
    else:
        # production path: bcrypt
        RhodeCodeCrypto = _RhodeCodeCryptoBCrypt()

    return RhodeCodeCrypto
245 245
246 246
def get_crypt_password(password):
    """
    Create the hash of `password` with the active crypto backend.

    :param password: The cleartext password.
    :type password: unicode
    """
    # backends require bytestrings, so coerce first
    password = safe_str(password)
    return crypto_backend().hash_create(password)
256 256
257 257
def check_password(password, hashed):
    """
    Check if the value in `password` matches the hash in `hashed`.

    :param password: The cleartext password.
    :type password: unicode

    :param hashed: The expected hashed version of the password.
    :type hashed: The hash has to be passed in in text representation.
    """
    # backends require bytestrings, so coerce first
    password = safe_str(password)
    return crypto_backend().hash_check(password, hashed)
270 270
271 271
def generate_auth_token(data, salt=None):
    """
    Generates API KEY from given string

    :param data: string to derive the token from
    :param salt: optional salt; when omitted a random 16-byte salt is drawn
        from os.urandom, making the resulting token non-reproducible.
    """

    if salt is None:
        salt = os.urandom(16)
    # NOTE(review): sha1-based token generation; considered weak for new
    # designs, kept here for compatibility with existing tokens.
    return hashlib.sha1(safe_str(data) + salt).hexdigest()
280 280
281 281
class CookieStoreWrapper(object):
    """
    Uniform `.get()` access over a cookie store that may be either a plain
    dict or an AuthUser instance (whose attributes are read via __dict__).
    """

    def __init__(self, cookie_store):
        self.cookie_store = cookie_store

    def __repr__(self):
        return 'CookieStore<%s>' % (self.cookie_store)

    def get(self, key, other=None):
        # dict-like store: plain lookup; AuthUser: read from its attributes.
        # NOTE: returns None for any other store type.
        if isinstance(self.cookie_store, dict):
            return self.cookie_store.get(key, other)
        elif isinstance(self.cookie_store, AuthUser):
            return self.cookie_store.__dict__.get(key, other)
295 295
296 296
def _cached_perms_data(user_id, scope, user_is_admin,
                       user_inherit_default_permissions, explicit, algo):
    # module-level function so the permission tree can be conditionally
    # cached by key (see AuthUser.get_perms); delegates all the work to
    # PermissionCalculator
    calculator = PermissionCalculator(
        user_id, scope, user_is_admin, user_inherit_default_permissions,
        explicit, algo)
    return calculator.calculate()
304 304
305 305
class PermOrigin(object):
    """
    Labels recording where a calculated permission came from; stored next to
    each permission entry in :class:`PermOriginDict`.

    Entries containing ``%s`` are templates, interpolated with the name of
    the user or user group that granted the permission.
    """
    ADMIN = 'superadmin'

    # repository permission origins
    REPO_USER = 'user:%s'
    REPO_USERGROUP = 'usergroup:%s'
    REPO_OWNER = 'repo.owner'
    REPO_DEFAULT = 'repo.default'
    REPO_PRIVATE = 'repo.private'

    # repository-group permission origins
    REPOGROUP_USER = 'user:%s'
    REPOGROUP_USERGROUP = 'usergroup:%s'
    REPOGROUP_OWNER = 'group.owner'
    REPOGROUP_DEFAULT = 'group.default'

    # user-group permission origins
    USERGROUP_USER = 'user:%s'
    USERGROUP_USERGROUP = 'usergroup:%s'
    USERGROUP_OWNER = 'usergroup.owner'
    USERGROUP_DEFAULT = 'usergroup.default'
324 324
325 325
class PermOriginDict(dict):
    """
    A special dict used for tracking permissions along with their origins.

    `__setitem__` has been overridden to expect a tuple(perm, origin)
    `__getitem__` will return only the perm
    `.perm_origin_stack` will return the stack of (perm, origin) set per key

    >>> perms = PermOriginDict()
    >>> perms['resource'] = 'read', 'default'
    >>> perms['resource']
    'read'
    >>> perms['resource'] = 'write', 'admin'
    >>> perms['resource']
    'write'
    >>> perms.perm_origin_stack
    {'resource': [('read', 'default'), ('write', 'admin')]}
    """

    def __init__(self, *args, **kw):
        dict.__init__(self, *args, **kw)
        # maps key -> list of every (perm, origin) assigned to it, in order
        self.perm_origin_stack = {}

    def __setitem__(self, key, perm_origin):
        # NOTE: unpack in the body rather than in the signature; tuple
        # parameter unpacking `(perm, origin)` is Python-2-only syntax
        # (removed by PEP 3113) — behavior is identical.
        perm, origin = perm_origin
        self.perm_origin_stack.setdefault(key, []).append((perm, origin))
        dict.__setitem__(self, key, perm)
352 352
353 353
class PermissionCalculator(object):
    """
    Computes the effective permission tree for a single user.

    The calculation layers, in order: global defaults taken from the
    `default` user, permissions from the user's user-group memberships, and
    the user's own explicit entries. Object ownership always promotes to the
    admin permission for that object. The result is the dict produced by
    `_permission_structure`.
    """

    def __init__(
            self, user_id, scope, user_is_admin,
            user_inherit_default_permissions, explicit, algo):
        self.user_id = user_id
        self.user_is_admin = user_is_admin
        self.inherit_default_permissions = user_inherit_default_permissions
        self.explicit = explicit
        self.algo = algo

        # scope narrows the DB queries to a single object id per kind;
        # a missing key means "no narrowing" for that object type
        scope = scope or {}
        self.scope_repo_id = scope.get('repo_id')
        self.scope_repo_group_id = scope.get('repo_group_id')
        self.scope_user_group_id = scope.get('user_group_id')

        self.default_user_id = User.get_default_user(cache=True).user_id

        # PermOriginDict also records the origin of every assignment
        self.permissions_repositories = PermOriginDict()
        self.permissions_repository_groups = PermOriginDict()
        self.permissions_user_groups = PermOriginDict()
        self.permissions_global = set()

        # baseline permissions of the `default` user, reused by several steps
        self.default_repo_perms = Permission.get_default_repo_perms(
            self.default_user_id, self.scope_repo_id)
        self.default_repo_groups_perms = Permission.get_default_group_perms(
            self.default_user_id, self.scope_repo_group_id)
        self.default_user_group_perms = \
            Permission.get_default_user_group_perms(
                self.default_user_id, self.scope_user_group_id)

    def calculate(self):
        # super-admins bypass the whole merge and get admin on everything
        if self.user_is_admin:
            return self._admin_permissions()

        self._calculate_global_default_permissions()
        self._calculate_global_permissions()
        self._calculate_default_permissions()
        self._calculate_repository_permissions()
        self._calculate_repository_group_permissions()
        self._calculate_user_group_permissions()
        return self._permission_structure()

    def _admin_permissions(self):
        """
        admin user have all default rights for repositories
        and groups set to admin
        """
        self.permissions_global.add('hg.admin')
        self.permissions_global.add('hg.create.write_on_repogroup.true')

        # repositories
        for perm in self.default_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            p = 'repository.admin'
            self.permissions_repositories[r_k] = p, PermOrigin.ADMIN

        # repository groups
        for perm in self.default_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            p = 'group.admin'
            self.permissions_repository_groups[rg_k] = p, PermOrigin.ADMIN

        # user groups
        for perm in self.default_user_group_perms:
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
            p = 'usergroup.admin'
            self.permissions_user_groups[u_k] = p, PermOrigin.ADMIN

        return self._permission_structure()

    def _calculate_global_default_permissions(self):
        """
        global permissions taken from the default user
        """
        default_global_perms = UserToPerm.query()\
            .filter(UserToPerm.user_id == self.default_user_id)\
            .options(joinedload(UserToPerm.permission))

        for perm in default_global_perms:
            self.permissions_global.add(perm.permission.permission_name)

    def _calculate_global_permissions(self):
        """
        Set global system permissions with user permissions or permissions
        taken from the user groups of the current user.

        The permissions include repo creating, repo group creating, forking
        etc.
        """

        # now we read the defined permissions and overwrite what we have set
        # before those can be configured from groups or users explicitly.

        # TODO: johbo: This seems to be out of sync, find out the reason
        # for the comment below and update it.

        # In case we want to extend this list we should be always in sync with
        # User.DEFAULT_USER_PERMISSIONS definitions
        _configurable = frozenset([
            'hg.fork.none', 'hg.fork.repository',
            'hg.create.none', 'hg.create.repository',
            'hg.usergroup.create.false', 'hg.usergroup.create.true',
            'hg.repogroup.create.false', 'hg.repogroup.create.true',
            'hg.create.write_on_repogroup.false',
            'hg.create.write_on_repogroup.true',
            'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
        ])

        # USER GROUPS comes first user group global permissions
        user_perms_from_users_groups = Session().query(UserGroupToPerm)\
            .options(joinedload(UserGroupToPerm.permission))\
            .join((UserGroupMember, UserGroupToPerm.users_group_id ==
                   UserGroupMember.users_group_id))\
            .filter(UserGroupMember.user_id == self.user_id)\
            .order_by(UserGroupToPerm.users_group_id)\
            .all()

        # need to group here by groups since user can be in more than
        # one group, so we get all groups
        _explicit_grouped_perms = [
            [x, list(y)] for x, y in
            itertools.groupby(user_perms_from_users_groups,
                              lambda _x: _x.users_group)]

        for gr, perms in _explicit_grouped_perms:
            # since user can be in multiple groups iterate over them and
            # select the lowest permissions first (more explicit)
            # TODO: marcink: do this^^

            # group doesn't inherit default permissions so we actually set them
            if not gr.inherit_default_permissions:
                # NEED TO IGNORE all previously set configurable permissions
                # and replace them with explicitly set from this user
                # group permissions
                self.permissions_global = self.permissions_global.difference(
                    _configurable)
                for perm in perms:
                    self.permissions_global.add(perm.permission.permission_name)

        # user explicit global permissions
        user_perms = Session().query(UserToPerm)\
            .options(joinedload(UserToPerm.permission))\
            .filter(UserToPerm.user_id == self.user_id).all()

        if not self.inherit_default_permissions:
            # NEED TO IGNORE all configurable permissions and
            # replace them with explicitly set from this user permissions
            self.permissions_global = self.permissions_global.difference(
                _configurable)
            for perm in user_perms:
                self.permissions_global.add(perm.permission.permission_name)

    def _calculate_default_permissions(self):
        """
        Set default user permissions for repositories, repository groups
        taken from the default user.

        Calculate inheritance of object permissions based on what we have now
        in GLOBAL permissions. We check if .false is in GLOBAL since this is
        explicitly set. Inherit is the opposite of .false being there.

        .. note::

           the syntax is little bit odd but what we need to check here is
           the opposite of .false permission being in the list so even for
           inconsistent state when both .true/.false is there
           .false is more important

        """
        user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
                                               in self.permissions_global)

        # defaults for repositories, taken from `default` user permissions
        # on given repo
        for perm in self.default_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            o = PermOrigin.REPO_DEFAULT
            if perm.Repository.private and not (
                    perm.Repository.user_id == self.user_id):
                # disable defaults for private repos,
                p = 'repository.none'
                o = PermOrigin.REPO_PRIVATE
            elif perm.Repository.user_id == self.user_id:
                # set admin if owner
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
            else:
                p = perm.Permission.permission_name
                # if we decide this user isn't inheriting permissions from
                # default user we set him to .none so only explicit
                # permissions work
                if not user_inherit_object_permissions:
                    p = 'repository.none'
            self.permissions_repositories[r_k] = p, o

        # defaults for repository groups taken from `default` user permission
        # on given group
        for perm in self.default_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            o = PermOrigin.REPOGROUP_DEFAULT
            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
            else:
                p = perm.Permission.permission_name

            # if we decide this user isn't inheriting permissions from default
            # user we set him to .none so only explicit permissions work
            if not user_inherit_object_permissions:
                p = 'group.none'
            self.permissions_repository_groups[rg_k] = p, o

        # defaults for user groups taken from `default` user permission
        # on given user group
        for perm in self.default_user_group_perms:
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
            o = PermOrigin.USERGROUP_DEFAULT
            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
            else:
                p = perm.Permission.permission_name

            # if we decide this user isn't inheriting permissions from default
            # user we set him to .none so only explicit permissions work
            if not user_inherit_object_permissions:
                p = 'usergroup.none'
            self.permissions_user_groups[u_k] = p, o

    def _calculate_repository_permissions(self):
        """
        Repository permissions for the current user.

        Check if the user is part of user groups for this repository and
        fill in the permission from it. `_choose_permission` decides of which
        permission should be selected based on selected method.
        """

        # user group for repositories permissions
        user_repo_perms_from_user_group = Permission\
            .get_default_repo_perms_from_user_group(
                self.user_id, self.scope_repo_id)

        multiple_counter = collections.defaultdict(int)
        for perm in user_repo_perms_from_user_group:
            r_k = perm.UserGroupRepoToPerm.repository.repo_name
            ug_k = perm.UserGroupRepoToPerm.users_group.users_group_name
            multiple_counter[r_k] += 1
            p = perm.Permission.permission_name
            o = PermOrigin.REPO_USERGROUP % ug_k

            if perm.Repository.user_id == self.user_id:
                # set admin if owner
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
            else:
                if multiple_counter[r_k] > 1:
                    # user is in more than one group granting this repo:
                    # resolve the conflict via the configured algorithm
                    cur_perm = self.permissions_repositories[r_k]
                    p = self._choose_permission(p, cur_perm)
            self.permissions_repositories[r_k] = p, o

        # user explicit permissions for repositories, overrides any specified
        # by the group permission
        user_repo_perms = Permission.get_default_repo_perms(
            self.user_id, self.scope_repo_id)
        for perm in user_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
            # set admin if owner
            if perm.Repository.user_id == self.user_id:
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
            else:
                p = perm.Permission.permission_name
                if not self.explicit:
                    cur_perm = self.permissions_repositories.get(
                        r_k, 'repository.none')
                    p = self._choose_permission(p, cur_perm)
            self.permissions_repositories[r_k] = p, o

    def _calculate_repository_group_permissions(self):
        """
        Repository group permissions for the current user.

        Check if the user is part of user groups for repository groups and
        fill in the permissions from it. `_choose_permission` decides of which
        permission should be selected based on selected method.
        """
        # user group for repo groups permissions
        user_repo_group_perms_from_user_group = Permission\
            .get_default_group_perms_from_user_group(
                self.user_id, self.scope_repo_group_id)

        multiple_counter = collections.defaultdict(int)
        for perm in user_repo_group_perms_from_user_group:
            g_k = perm.UserGroupRepoGroupToPerm.group.group_name
            ug_k = perm.UserGroupRepoGroupToPerm.users_group.users_group_name
            o = PermOrigin.REPOGROUP_USERGROUP % ug_k
            multiple_counter[g_k] += 1
            p = perm.Permission.permission_name
            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner, even for member of other user group
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
            else:
                if multiple_counter[g_k] > 1:
                    cur_perm = self.permissions_repository_groups[g_k]
                    p = self._choose_permission(p, cur_perm)
            self.permissions_repository_groups[g_k] = p, o

        # user explicit permissions for repository groups
        user_repo_groups_perms = Permission.get_default_group_perms(
            self.user_id, self.scope_repo_group_id)
        for perm in user_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            u_k = perm.UserRepoGroupToPerm.user.username
            o = PermOrigin.REPOGROUP_USER % u_k

            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
            else:
                p = perm.Permission.permission_name
                if not self.explicit:
                    cur_perm = self.permissions_repository_groups.get(
                        rg_k, 'group.none')
                    p = self._choose_permission(p, cur_perm)
            self.permissions_repository_groups[rg_k] = p, o

    def _calculate_user_group_permissions(self):
        """
        User group permissions for the current user.
        """
        # user group for user group permissions
        user_group_from_user_group = Permission\
            .get_default_user_group_perms_from_user_group(
                self.user_id, self.scope_user_group_id)

        multiple_counter = collections.defaultdict(int)
        for perm in user_group_from_user_group:
            g_k = perm.UserGroupUserGroupToPerm\
                .target_user_group.users_group_name
            u_k = perm.UserGroupUserGroupToPerm\
                .user_group.users_group_name
            o = PermOrigin.USERGROUP_USERGROUP % u_k
            multiple_counter[g_k] += 1
            p = perm.Permission.permission_name

            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner, even for member of other user group
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
            else:
                if multiple_counter[g_k] > 1:
                    cur_perm = self.permissions_user_groups[g_k]
                    p = self._choose_permission(p, cur_perm)
            self.permissions_user_groups[g_k] = p, o

        # user explicit permission for user groups
        user_user_groups_perms = Permission.get_default_user_group_perms(
            self.user_id, self.scope_user_group_id)
        for perm in user_user_groups_perms:
            ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
            u_k = perm.UserUserGroupToPerm.user.username
            o = PermOrigin.USERGROUP_USER % u_k

            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
            else:
                p = perm.Permission.permission_name
                if not self.explicit:
                    cur_perm = self.permissions_user_groups.get(
                        ug_k, 'usergroup.none')
                    p = self._choose_permission(p, cur_perm)
            self.permissions_user_groups[ug_k] = p, o

    def _choose_permission(self, new_perm, cur_perm):
        # resolve a conflict between two permissions by their configured
        # weights; 'higherwin' keeps the stronger one, 'lowerwin' the weaker.
        # NOTE: implicitly returns None for any other value of self.algo.
        new_perm_val = Permission.PERM_WEIGHTS[new_perm]
        cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
        if self.algo == 'higherwin':
            if new_perm_val > cur_perm_val:
                return new_perm
            return cur_perm
        elif self.algo == 'lowerwin':
            if new_perm_val < cur_perm_val:
                return new_perm
            return cur_perm

    def _permission_structure(self):
        # final shape consumed by AuthUser.permissions and the cache layer
        return {
            'global': self.permissions_global,
            'repositories': self.permissions_repositories,
            'repositories_groups': self.permissions_repository_groups,
            'user_groups': self.permissions_user_groups,
        }
755 755
756 756
def allowed_auth_token_access(controller_name, whitelist=None, auth_token=None):
    """
    Check if given controller_name is in whitelist of auth token access

    :param controller_name: name of the controller being accessed
    :param whitelist: optional iterable of fnmatch patterns; when falsy it is
        loaded from the `api_access_controllers_whitelist` config entry
    :param auth_token: token used for the access attempt; only changes the
        log level on a miss (a supplied-but-unauthorized token is a warning)
    """
    if not whitelist:
        from rhodecode import CONFIG
        whitelist = aslist(
            CONFIG.get('api_access_controllers_whitelist'), sep=',')
        # use lazy %-args so logging skips formatting when DEBUG is disabled
        log.debug(
            'Allowed controllers for AUTH TOKEN access: %s', whitelist)

    auth_token_access_valid = any(
        fnmatch.fnmatch(controller_name, entry) for entry in whitelist)

    if auth_token_access_valid:
        log.debug('controller:%s matches entry in whitelist',
                  controller_name)
    else:
        msg = ('controller: %s does *NOT* match any entry in whitelist'
               % (controller_name,))
        if auth_token:
            # if we use auth token key and don't have access it's a warning
            log.warning(msg)
        else:
            log.debug(msg)

    return auth_token_access_valid
787 787
788 788
class AuthUser(object):
    """
    A simple object that handles all attributes of user in RhodeCode

    It does lookup based on API key, given user, or user present in session
    Then it fills all required information for such user. It also checks if
    anonymous access is enabled and if so, it returns default user as logged in
    """
    GLOBAL_PERMS = [x[0] for x in Permission.PERMS]

    def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):

        # lookup inputs; `_api_key` is what was supplied, while `api_key`
        # below is filled from the DB by UserModel.fill_data
        self.user_id = user_id
        self._api_key = api_key

        self.api_key = None
        self.feed_token = ''
        self.username = username
        self.ip_addr = ip_addr
        self.name = ''
        self.lastname = ''
        self.first_name = ''
        self.last_name = ''
        self.email = ''
        self.is_authenticated = False
        self.admin = False
        self.inherit_default_permissions = False
        self.password = ''

        self.anonymous_user = None  # propagated on propagate_data
        self.propagate_data()
        self._instance = None
        self._permissions_scoped_cache = {}  # used to bind scoped calculation

    @LazyProperty
    def permissions(self):
        # computed once per AuthUser instance (LazyProperty), unscoped
        return self.get_perms(user=self, cache=False)

    def permissions_with_scope(self, scope):
        """
        Call the get_perms function with scoped data. The scope in that function
        narrows the SQL calls to the given ID of objects resulting in fetching
        Just particular permission we want to obtain. If scope is an empty dict
        then it basically narrows the scope to GLOBAL permissions only.

        :param scope: dict
        """
        if 'repo_name' in scope:
            obj = Repository.get_by_repo_name(scope['repo_name'])
            if obj:
                scope['repo_id'] = obj.repo_id
        # -1 means "scope to nothing" for object kinds the caller omitted
        _scope = {
            'repo_id': -1,
            'user_group_id': -1,
            'repo_group_id': -1,
        }
        _scope.update(scope)
        # flatten the scope dict items into a single tuple to build the key
        # (`reduce` is the Python 2 builtin here)
        cache_key = "_".join(map(safe_str, reduce(lambda a, b: a+b,
                                                  _scope.items())))
        if cache_key not in self._permissions_scoped_cache:
            # store in cache to mimic how the @LazyProperty works,
            # the difference here is that we use the unique key calculated
            # from params and values
            res = self.get_perms(user=self, cache=False, scope=_scope)
            self._permissions_scoped_cache[cache_key] = res
        return self._permissions_scoped_cache[cache_key]

    def get_instance(self):
        # fresh DB User object for this AuthUser
        return User.get(self.user_id)

    def update_lastactivity(self):
        # no-op for the not-loaded/anonymous-disabled case (user_id is None)
        if self.user_id:
            User.get(self.user_id).update_lastactivity()

    def propagate_data(self):
        """
        Fills in user data and propagates values to this instance. Maps fetched
        user attributes to this class instance attributes
        """
        # lookup precedence: user_id, then API key, then username; if all
        # fail, fall back to the anonymous (default) user when it is active
        log.debug('starting data propagation for new potential AuthUser')
        user_model = UserModel()
        anon_user = self.anonymous_user = User.get_default_user(cache=True)
        is_user_loaded = False

        # lookup by userid
        if self.user_id is not None and self.user_id != anon_user.user_id:
            log.debug('Trying Auth User lookup by USER ID: `%s`' % self.user_id)
            is_user_loaded = user_model.fill_data(self, user_id=self.user_id)

        # try go get user by api key
        elif self._api_key and self._api_key != anon_user.api_key:
            log.debug('Trying Auth User lookup by API KEY: `%s`' % self._api_key)
            is_user_loaded = user_model.fill_data(self, api_key=self._api_key)

        # lookup by username
        elif self.username:
            log.debug('Trying Auth User lookup by USER NAME: `%s`' % self.username)
            is_user_loaded = user_model.fill_data(self, username=self.username)
        else:
            log.debug('No data in %s that could been used to log in' % self)

        if not is_user_loaded:
            log.debug('Failed to load user. Fallback to default user')
            # if we cannot authenticate user try anonymous
            if anon_user.active:
                user_model.fill_data(self, user_id=anon_user.user_id)
                # then we set this user is logged in
                self.is_authenticated = True
            else:
                # in case of disabled anonymous user we reset some of the
                # parameters so such user is "corrupted", skipping the fill_data
                for attr in ['user_id', 'username', 'admin', 'active']:
                    setattr(self, attr, None)
                self.is_authenticated = False

        if not self.username:
            self.username = 'None'

        log.debug('Auth User is now %s' % self)

    def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
                  cache=False):
        """
        Fills user permission attribute with permissions taken from database
        works for permissions given for repositories, and for permissions that
        are granted to groups

        :param user: instance of User object from database
        :param explicit: In case there are permissions both for user and a group
            that user is part of, explicit flag will define if user will
            explicitly override permissions from group, if it's False it will
            make decision based on the algo
        :param algo: algorithm to decide what permission should be chosen if
            it's multiple defined, eg user in two different groups. It also
            decides if explicit flag is turned off how to specify the permission
            for case when user is in a group + have defined separate permission
        """
        user_id = user.user_id
        user_is_admin = user.is_admin

        # inheritance of global permissions like create repo/fork repo etc
        user_inherit_default_permissions = user.inherit_default_permissions

        log.debug('Computing PERMISSION tree for scope %s' % (scope, ))
        # conditional_cache only caches when `cache` is True; otherwise it
        # calls _cached_perms_data directly
        compute = caches.conditional_cache(
            'short_term', 'cache_desc',
            condition=cache, func=_cached_perms_data)
        result = compute(user_id, scope, user_is_admin,
                         user_inherit_default_permissions, explicit, algo)

        result_repr = []
        for k in result:
            result_repr.append((k, len(result[k])))

        log.debug('PERMISSION tree computed %s' % (result_repr,))
        return result

    @property
    def is_default(self):
        return self.username == User.DEFAULT_USER

    @property
    def is_admin(self):
        return self.admin

    @property
    def is_user_object(self):
        return self.user_id is not None

    @property
    def repositories_admin(self):
        """
        Returns list of repositories you're an admin of
        """
        return [
            x[0] for x in self.permissions['repositories'].iteritems()
            if x[1] == 'repository.admin']

    @property
    def repository_groups_admin(self):
        """
        Returns list of repository groups you're an admin of
        """
        return [
            x[0] for x in self.permissions['repositories_groups'].iteritems()
            if x[1] == 'group.admin']

    @property
    def user_groups_admin(self):
        """
        Returns list of user groups you're an admin of
        """
        return [
            x[0] for x in self.permissions['user_groups'].iteritems()
            if x[1] == 'usergroup.admin']

    @property
    def ip_allowed(self):
        """
        Checks if ip_addr used in constructor is allowed from defined list of
        allowed ip_addresses for user

        :returns: boolean, True if ip is in allowed ip range
        """
        # check IP
        inherit = self.inherit_default_permissions
        return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
                                         inherit_from_default=inherit)
    @property
    def personal_repo_group(self):
        return RepoGroup.get_user_personal_repo_group(self.user_id)

    @classmethod
    def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
        allowed_ips = AuthUser.get_allowed_ips(
            user_id, cache=True, inherit_from_default=inherit_from_default)
        if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
            log.debug('IP:%s is in range of %s' % (ip_addr, allowed_ips))
            return True
        else:
            log.info('Access for IP:%s forbidden, '
                     'not in %s' % (ip_addr, allowed_ips))
            return False

    def __repr__(self):
        return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\
            % (self.user_id, self.username, self.ip_addr, self.is_authenticated)

    def set_authenticated(self, authenticated=True):
        # the anonymous (default) user can never be marked authenticated
        if self.user_id != self.anonymous_user.user_id:
            self.is_authenticated = authenticated

    def get_cookie_store(self):
        # password is stored as an md5 digest, never in cleartext
        return {
            'username': self.username,
            'password': md5(self.password),
            'user_id': self.user_id,
            'is_authenticated': self.is_authenticated
        }

    @classmethod
    def from_cookie_store(cls, cookie_store):
        """
        Creates AuthUser from a cookie store

        :param cls:
        :param cookie_store:
        """
        # NOTE(review): get_cookie_store() never writes an 'api_key' entry,
        # so api_key is presumably always None here — confirm intended
        user_id = cookie_store.get('user_id')
        username = cookie_store.get('username')
        api_key = cookie_store.get('api_key')
        return AuthUser(user_id, api_key, username)

    @classmethod
    def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
        _set = set()

        if inherit_from_default:
            default_ips = UserIpMap.query().filter(
                UserIpMap.user == User.get_default_user(cache=True))
            if cache:
                default_ips = default_ips.options(
                    FromCache("sql_cache_short", "get_user_ips_default"))

            # populate from default user
            for ip in default_ips:
                try:
                    _set.add(ip.ip_addr)
                except ObjectDeletedError:
                    # since we use heavy caching sometimes it happens that
                    # we get deleted objects here, we just skip them
                    pass

        user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
        if cache:
            user_ips = user_ips.options(
                FromCache("sql_cache_short", "get_user_ips_%s" % user_id))

        for ip in user_ips:
            try:
                _set.add(ip.ip_addr)
            except ObjectDeletedError:
                # since we use heavy caching sometimes it happens that we get
                # deleted objects here, we just skip them
                pass
        # no restrictions configured means every IPv4/IPv6 address is allowed
        return _set or set(['0.0.0.0/0', '::/0'])
1075 1075
1076 1076
def set_available_permissions(config):
    """
    This function will propagate pylons globals with all available defined
    permission given in db. We don't want to check each time from db for new
    permissions since adding a new permission also requires application restart
    ie. to decorate new views with the newly created permission

    :param config: current pylons config instance

    """
    log.info('getting information about all available permissions')
    try:
        db_session = meta.Session
        all_permissions = db_session.query(Permission).all()
        config['available_permissions'] = [
            p.permission_name for p in all_permissions]
    except Exception:
        log.error(traceback.format_exc())
    finally:
        # always release the scoped session back
        meta.Session.remove()
1096 1096
1097 1097
def get_csrf_token(session=None, force_new=False, save_if_missing=True):
    """
    Return the current authentication token, creating one if one doesn't
    already exist and the save_if_missing flag is present.

    :param session: pass in the pylons session, else we use the global ones
    :param force_new: force to re-generate the token and store it in session
    :param save_if_missing: save the newly generated token if it's missing in
        session
    """
    # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
    # from pyramid.csrf import get_csrf_token

    if not session:
        from pylons import session

    needs_token = force_new or (
        save_if_missing and csrf_token_key not in session)
    if needs_token:
        fresh_token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
        session[csrf_token_key] = fresh_token
        # beaker-style sessions need an explicit save
        if hasattr(session, 'save'):
            session.save()
    return session.get(csrf_token_key)
1117 1120
1118 1121
1119 1122 def get_request(perm_class):
1120 1123 from pyramid.threadlocal import get_current_request
1121 1124 pyramid_request = get_current_request()
1122 1125 if not pyramid_request:
1123 1126 # return global request of pylons in case pyramid isn't available
1124 1127 # NOTE(marcink): this should be removed after migration to pyramid
1125 1128 from pylons import request
1126 1129 return request
1127 1130 return pyramid_request
1128 1131
1129 1132
1130 1133 # CHECK DECORATORS
1131 1134 class CSRFRequired(object):
1132 1135 """
1133 1136 Decorator for authenticating a form
1134 1137
1135 1138 This decorator uses an authorization token stored in the client's
1136 1139 session for prevention of certain Cross-site request forgery (CSRF)
1137 1140 attacks (See
1138 1141 http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
1139 1142 information).
1140 1143
1141 1144 For use with the ``webhelpers.secure_form`` helper functions.
1142 1145
1143 1146 """
1144 1147 def __init__(self, token=csrf_token_key, header='X-CSRF-Token',
1145 1148 except_methods=None):
1146 1149 self.token = token
1147 1150 self.header = header
1148 1151 self.except_methods = except_methods or []
1149 1152
1150 1153 def __call__(self, func):
1151 1154 return get_cython_compat_decorator(self.__wrapper, func)
1152 1155
1153 1156 def _get_csrf(self, _request):
1154 1157 return _request.POST.get(self.token, _request.headers.get(self.header))
1155 1158
1156 1159 def check_csrf(self, _request, cur_token):
1157 1160 supplied_token = self._get_csrf(_request)
1158 1161 return supplied_token and supplied_token == cur_token
1159 1162
1160 1163 def _get_request(self):
1161 1164 return get_request(self)
1162 1165
1163 1166 def __wrapper(self, func, *fargs, **fkwargs):
1164 1167 request = self._get_request()
1165 1168
1166 1169 if request.method in self.except_methods:
1167 1170 return func(*fargs, **fkwargs)
1168 1171
1169 1172 cur_token = get_csrf_token(save_if_missing=False)
1170 1173 if self.check_csrf(request, cur_token):
1171 1174 if request.POST.get(self.token):
1172 1175 del request.POST[self.token]
1173 1176 return func(*fargs, **fkwargs)
1174 1177 else:
1175 1178 reason = 'token-missing'
1176 1179 supplied_token = self._get_csrf(request)
1177 1180 if supplied_token and cur_token != supplied_token:
1178 1181 reason = 'token-mismatch [%s:%s]' % (
1179 1182 cur_token or ''[:6], supplied_token or ''[:6])
1180 1183
1181 1184 csrf_message = \
1182 1185 ("Cross-site request forgery detected, request denied. See "
1183 1186 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
1184 1187 "more information.")
1185 1188 log.warn('Cross-site request forgery detected, request %r DENIED: %s '
1186 1189 'REMOTE_ADDR:%s, HEADERS:%s' % (
1187 1190 request, reason, request.remote_addr, request.headers))
1188 1191
1189 1192 raise HTTPForbidden(explanation=csrf_message)
1190 1193
1191 1194
1192 1195 class LoginRequired(object):
1193 1196 """
1194 1197 Must be logged in to execute this function else
1195 1198 redirect to login page
1196 1199
1197 1200 :param api_access: if enabled this checks only for valid auth token
1198 1201 and grants access based on valid token
1199 1202 """
1200 1203 def __init__(self, auth_token_access=None):
1201 1204 self.auth_token_access = auth_token_access
1202 1205
1203 1206 def __call__(self, func):
1204 1207 return get_cython_compat_decorator(self.__wrapper, func)
1205 1208
1206 1209 def _get_request(self):
1207 1210 return get_request(self)
1208 1211
1209 1212 def __wrapper(self, func, *fargs, **fkwargs):
1210 1213 from rhodecode.lib import helpers as h
1211 1214 cls = fargs[0]
1212 1215 user = cls._rhodecode_user
1213 1216 request = self._get_request()
1214 1217
1215 1218 loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
1216 1219 log.debug('Starting login restriction checks for user: %s' % (user,))
1217 1220 # check if our IP is allowed
1218 1221 ip_access_valid = True
1219 1222 if not user.ip_allowed:
1220 1223 h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))),
1221 1224 category='warning')
1222 1225 ip_access_valid = False
1223 1226
1224 1227 # check if we used an APIKEY and it's a valid one
1225 1228 # defined white-list of controllers which API access will be enabled
1226 1229 _auth_token = request.GET.get(
1227 1230 'auth_token', '') or request.GET.get('api_key', '')
1228 1231 auth_token_access_valid = allowed_auth_token_access(
1229 1232 loc, auth_token=_auth_token)
1230 1233
1231 1234 # explicit controller is enabled or API is in our whitelist
1232 1235 if self.auth_token_access or auth_token_access_valid:
1233 1236 log.debug('Checking AUTH TOKEN access for %s' % (cls,))
1234 1237 db_user = user.get_instance()
1235 1238
1236 1239 if db_user:
1237 1240 if self.auth_token_access:
1238 1241 roles = self.auth_token_access
1239 1242 else:
1240 1243 roles = [UserApiKeys.ROLE_HTTP]
1241 1244 token_match = db_user.authenticate_by_token(
1242 1245 _auth_token, roles=roles)
1243 1246 else:
1244 1247 log.debug('Unable to fetch db instance for auth user: %s', user)
1245 1248 token_match = False
1246 1249
1247 1250 if _auth_token and token_match:
1248 1251 auth_token_access_valid = True
1249 1252 log.debug('AUTH TOKEN ****%s is VALID' % (_auth_token[-4:],))
1250 1253 else:
1251 1254 auth_token_access_valid = False
1252 1255 if not _auth_token:
1253 1256 log.debug("AUTH TOKEN *NOT* present in request")
1254 1257 else:
1255 1258 log.warning(
1256 1259 "AUTH TOKEN ****%s *NOT* valid" % _auth_token[-4:])
1257 1260
1258 1261 log.debug('Checking if %s is authenticated @ %s' % (user.username, loc))
1259 1262 reason = 'RHODECODE_AUTH' if user.is_authenticated \
1260 1263 else 'AUTH_TOKEN_AUTH'
1261 1264
1262 1265 if ip_access_valid and (
1263 1266 user.is_authenticated or auth_token_access_valid):
1264 1267 log.info(
1265 1268 'user %s authenticating with:%s IS authenticated on func %s'
1266 1269 % (user, reason, loc))
1267 1270
1268 1271 # update user data to check last activity
1269 1272 user.update_lastactivity()
1270 1273 Session().commit()
1271 1274 return func(*fargs, **fkwargs)
1272 1275 else:
1273 1276 log.warning(
1274 1277 'user %s authenticating with:%s NOT authenticated on '
1275 1278 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s'
1276 1279 % (user, reason, loc, ip_access_valid,
1277 1280 auth_token_access_valid))
1278 1281 # we preserve the get PARAM
1279 1282 came_from = request.path_qs
1280 1283 log.debug('redirecting to login page with %s' % (came_from,))
1281 1284 raise HTTPFound(
1282 1285 h.route_path('login', _query={'came_from': came_from}))
1283 1286
1284 1287
1285 1288 class NotAnonymous(object):
1286 1289 """
1287 1290 Must be logged in to execute this function else
1288 1291 redirect to login page
1289 1292 """
1290 1293
1291 1294 def __call__(self, func):
1292 1295 return get_cython_compat_decorator(self.__wrapper, func)
1293 1296
1294 1297 def _get_request(self):
1295 1298 return get_request(self)
1296 1299
1297 1300 def __wrapper(self, func, *fargs, **fkwargs):
1298 1301 import rhodecode.lib.helpers as h
1299 1302 cls = fargs[0]
1300 1303 self.user = cls._rhodecode_user
1301 1304 request = self._get_request()
1302 1305
1303 1306 log.debug('Checking if user is not anonymous @%s' % cls)
1304 1307
1305 1308 anonymous = self.user.username == User.DEFAULT_USER
1306 1309
1307 1310 if anonymous:
1308 1311 came_from = request.path_qs
1309 1312 h.flash(_('You need to be a registered user to '
1310 1313 'perform this action'),
1311 1314 category='warning')
1312 1315 raise HTTPFound(
1313 1316 h.route_path('login', _query={'came_from': came_from}))
1314 1317 else:
1315 1318 return func(*fargs, **fkwargs)
1316 1319
1317 1320
1318 1321 class XHRRequired(object):
1319 1322 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1320 1323
1321 1324 def __call__(self, func):
1322 1325 return get_cython_compat_decorator(self.__wrapper, func)
1323 1326
1324 1327 def _get_request(self):
1325 1328 return get_request(self)
1326 1329
1327 1330 def __wrapper(self, func, *fargs, **fkwargs):
1328 1331 from pylons.controllers.util import abort
1329 1332 request = self._get_request()
1330 1333
1331 1334 log.debug('Checking if request is XMLHttpRequest (XHR)')
1332 1335 xhr_message = 'This is not a valid XMLHttpRequest (XHR) request'
1333 1336
1334 1337 if not request.is_xhr:
1335 1338 abort(400, detail=xhr_message)
1336 1339
1337 1340 return func(*fargs, **fkwargs)
1338 1341
1339 1342
1340 1343 class HasAcceptedRepoType(object):
1341 1344 """
1342 1345 Check if requested repo is within given repo type aliases
1343 1346 """
1344 1347
1345 1348 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1346 1349
1347 1350 def __init__(self, *repo_type_list):
1348 1351 self.repo_type_list = set(repo_type_list)
1349 1352
1350 1353 def __call__(self, func):
1351 1354 return get_cython_compat_decorator(self.__wrapper, func)
1352 1355
1353 1356 def __wrapper(self, func, *fargs, **fkwargs):
1354 1357 import rhodecode.lib.helpers as h
1355 1358 cls = fargs[0]
1356 1359 rhodecode_repo = cls.rhodecode_repo
1357 1360
1358 1361 log.debug('%s checking repo type for %s in %s',
1359 1362 self.__class__.__name__,
1360 1363 rhodecode_repo.alias, self.repo_type_list)
1361 1364
1362 1365 if rhodecode_repo.alias in self.repo_type_list:
1363 1366 return func(*fargs, **fkwargs)
1364 1367 else:
1365 1368 h.flash(h.literal(
1366 1369 _('Action not supported for %s.' % rhodecode_repo.alias)),
1367 1370 category='warning')
1368 1371 raise HTTPFound(
1369 1372 h.route_path('repo_summary',
1370 1373 repo_name=cls.rhodecode_db_repo.repo_name))
1371 1374
1372 1375
1373 1376 class PermsDecorator(object):
1374 1377 """
1375 1378 Base class for controller decorators, we extract the current user from
1376 1379 the class itself, which has it stored in base controllers
1377 1380 """
1378 1381
1379 1382 def __init__(self, *required_perms):
1380 1383 self.required_perms = set(required_perms)
1381 1384
1382 1385 def __call__(self, func):
1383 1386 return get_cython_compat_decorator(self.__wrapper, func)
1384 1387
1385 1388 def _get_request(self):
1386 1389 return get_request(self)
1387 1390
1388 1391 def _get_came_from(self):
1389 1392 _request = self._get_request()
1390 1393
1391 1394 # both pylons/pyramid has this attribute
1392 1395 return _request.path_qs
1393 1396
1394 1397 def __wrapper(self, func, *fargs, **fkwargs):
1395 1398 import rhodecode.lib.helpers as h
1396 1399 cls = fargs[0]
1397 1400 _user = cls._rhodecode_user
1398 1401
1399 1402 log.debug('checking %s permissions %s for %s %s',
1400 1403 self.__class__.__name__, self.required_perms, cls, _user)
1401 1404
1402 1405 if self.check_permissions(_user):
1403 1406 log.debug('Permission granted for %s %s', cls, _user)
1404 1407 return func(*fargs, **fkwargs)
1405 1408
1406 1409 else:
1407 1410 log.debug('Permission denied for %s %s', cls, _user)
1408 1411 anonymous = _user.username == User.DEFAULT_USER
1409 1412
1410 1413 if anonymous:
1411 1414 came_from = self._get_came_from()
1412 1415 h.flash(_('You need to be signed in to view this page'),
1413 1416 category='warning')
1414 1417 raise HTTPFound(
1415 1418 h.route_path('login', _query={'came_from': came_from}))
1416 1419
1417 1420 else:
1418 1421 # redirect with 404 to prevent resource discovery
1419 1422 raise HTTPNotFound()
1420 1423
1421 1424 def check_permissions(self, user):
1422 1425 """Dummy function for overriding"""
1423 1426 raise NotImplementedError(
1424 1427 'You have to write this function in child class')
1425 1428
1426 1429
1427 1430 class HasPermissionAllDecorator(PermsDecorator):
1428 1431 """
1429 1432 Checks for access permission for all given predicates. All of them
1430 1433 have to be meet in order to fulfill the request
1431 1434 """
1432 1435
1433 1436 def check_permissions(self, user):
1434 1437 perms = user.permissions_with_scope({})
1435 1438 if self.required_perms.issubset(perms['global']):
1436 1439 return True
1437 1440 return False
1438 1441
1439 1442
1440 1443 class HasPermissionAnyDecorator(PermsDecorator):
1441 1444 """
1442 1445 Checks for access permission for any of given predicates. In order to
1443 1446 fulfill the request any of predicates must be meet
1444 1447 """
1445 1448
1446 1449 def check_permissions(self, user):
1447 1450 perms = user.permissions_with_scope({})
1448 1451 if self.required_perms.intersection(perms['global']):
1449 1452 return True
1450 1453 return False
1451 1454
1452 1455
1453 1456 class HasRepoPermissionAllDecorator(PermsDecorator):
1454 1457 """
1455 1458 Checks for access permission for all given predicates for specific
1456 1459 repository. All of them have to be meet in order to fulfill the request
1457 1460 """
1458 1461 def _get_repo_name(self):
1459 1462 _request = self._get_request()
1460 1463 return get_repo_slug(_request)
1461 1464
1462 1465 def check_permissions(self, user):
1463 1466 perms = user.permissions
1464 1467 repo_name = self._get_repo_name()
1465 1468
1466 1469 try:
1467 1470 user_perms = set([perms['repositories'][repo_name]])
1468 1471 except KeyError:
1469 1472 log.debug('cannot locate repo with name: `%s` in permissions defs',
1470 1473 repo_name)
1471 1474 return False
1472 1475
1473 1476 log.debug('checking `%s` permissions for repo `%s`',
1474 1477 user_perms, repo_name)
1475 1478 if self.required_perms.issubset(user_perms):
1476 1479 return True
1477 1480 return False
1478 1481
1479 1482
1480 1483 class HasRepoPermissionAnyDecorator(PermsDecorator):
1481 1484 """
1482 1485 Checks for access permission for any of given predicates for specific
1483 1486 repository. In order to fulfill the request any of predicates must be meet
1484 1487 """
1485 1488 def _get_repo_name(self):
1486 1489 _request = self._get_request()
1487 1490 return get_repo_slug(_request)
1488 1491
1489 1492 def check_permissions(self, user):
1490 1493 perms = user.permissions
1491 1494 repo_name = self._get_repo_name()
1492 1495
1493 1496 try:
1494 1497 user_perms = set([perms['repositories'][repo_name]])
1495 1498 except KeyError:
1496 1499 log.debug('cannot locate repo with name: `%s` in permissions defs',
1497 1500 repo_name)
1498 1501 return False
1499 1502
1500 1503 log.debug('checking `%s` permissions for repo `%s`',
1501 1504 user_perms, repo_name)
1502 1505 if self.required_perms.intersection(user_perms):
1503 1506 return True
1504 1507 return False
1505 1508
1506 1509
1507 1510 class HasRepoGroupPermissionAllDecorator(PermsDecorator):
1508 1511 """
1509 1512 Checks for access permission for all given predicates for specific
1510 1513 repository group. All of them have to be meet in order to
1511 1514 fulfill the request
1512 1515 """
1513 1516 def _get_repo_group_name(self):
1514 1517 _request = self._get_request()
1515 1518 return get_repo_group_slug(_request)
1516 1519
1517 1520 def check_permissions(self, user):
1518 1521 perms = user.permissions
1519 1522 group_name = self._get_repo_group_name()
1520 1523 try:
1521 1524 user_perms = set([perms['repositories_groups'][group_name]])
1522 1525 except KeyError:
1523 1526 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1524 1527 group_name)
1525 1528 return False
1526 1529
1527 1530 log.debug('checking `%s` permissions for repo group `%s`',
1528 1531 user_perms, group_name)
1529 1532 if self.required_perms.issubset(user_perms):
1530 1533 return True
1531 1534 return False
1532 1535
1533 1536
1534 1537 class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
1535 1538 """
1536 1539 Checks for access permission for any of given predicates for specific
1537 1540 repository group. In order to fulfill the request any
1538 1541 of predicates must be met
1539 1542 """
1540 1543 def _get_repo_group_name(self):
1541 1544 _request = self._get_request()
1542 1545 return get_repo_group_slug(_request)
1543 1546
1544 1547 def check_permissions(self, user):
1545 1548 perms = user.permissions
1546 1549 group_name = self._get_repo_group_name()
1547 1550
1548 1551 try:
1549 1552 user_perms = set([perms['repositories_groups'][group_name]])
1550 1553 except KeyError:
1551 1554 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1552 1555 group_name)
1553 1556 return False
1554 1557
1555 1558 log.debug('checking `%s` permissions for repo group `%s`',
1556 1559 user_perms, group_name)
1557 1560 if self.required_perms.intersection(user_perms):
1558 1561 return True
1559 1562 return False
1560 1563
1561 1564
1562 1565 class HasUserGroupPermissionAllDecorator(PermsDecorator):
1563 1566 """
1564 1567 Checks for access permission for all given predicates for specific
1565 1568 user group. All of them have to be meet in order to fulfill the request
1566 1569 """
1567 1570 def _get_user_group_name(self):
1568 1571 _request = self._get_request()
1569 1572 return get_user_group_slug(_request)
1570 1573
1571 1574 def check_permissions(self, user):
1572 1575 perms = user.permissions
1573 1576 group_name = self._get_user_group_name()
1574 1577 try:
1575 1578 user_perms = set([perms['user_groups'][group_name]])
1576 1579 except KeyError:
1577 1580 return False
1578 1581
1579 1582 if self.required_perms.issubset(user_perms):
1580 1583 return True
1581 1584 return False
1582 1585
1583 1586
1584 1587 class HasUserGroupPermissionAnyDecorator(PermsDecorator):
1585 1588 """
1586 1589 Checks for access permission for any of given predicates for specific
1587 1590 user group. In order to fulfill the request any of predicates must be meet
1588 1591 """
1589 1592 def _get_user_group_name(self):
1590 1593 _request = self._get_request()
1591 1594 return get_user_group_slug(_request)
1592 1595
1593 1596 def check_permissions(self, user):
1594 1597 perms = user.permissions
1595 1598 group_name = self._get_user_group_name()
1596 1599 try:
1597 1600 user_perms = set([perms['user_groups'][group_name]])
1598 1601 except KeyError:
1599 1602 return False
1600 1603
1601 1604 if self.required_perms.intersection(user_perms):
1602 1605 return True
1603 1606 return False
1604 1607
1605 1608
1606 1609 # CHECK FUNCTIONS
1607 1610 class PermsFunction(object):
1608 1611 """Base function for other check functions"""
1609 1612
1610 1613 def __init__(self, *perms):
1611 1614 self.required_perms = set(perms)
1612 1615 self.repo_name = None
1613 1616 self.repo_group_name = None
1614 1617 self.user_group_name = None
1615 1618
1616 1619 def __bool__(self):
1617 1620 frame = inspect.currentframe()
1618 1621 stack_trace = traceback.format_stack(frame)
1619 1622 log.error('Checking bool value on a class instance of perm '
1620 1623 'function is not allowed: %s' % ''.join(stack_trace))
1621 1624 # rather than throwing errors, here we always return False so if by
1622 1625 # accident someone checks truth for just an instance it will always end
1623 1626 # up in returning False
1624 1627 return False
1625 1628 __nonzero__ = __bool__
1626 1629
1627 1630 def __call__(self, check_location='', user=None):
1628 1631 if not user:
1629 1632 log.debug('Using user attribute from global request')
1630 1633 # TODO: remove this someday,put as user as attribute here
1631 1634 request = self._get_request()
1632 1635 user = request.user
1633 1636
1634 1637 # init auth user if not already given
1635 1638 if not isinstance(user, AuthUser):
1636 1639 log.debug('Wrapping user %s into AuthUser', user)
1637 1640 user = AuthUser(user.user_id)
1638 1641
1639 1642 cls_name = self.__class__.__name__
1640 1643 check_scope = self._get_check_scope(cls_name)
1641 1644 check_location = check_location or 'unspecified location'
1642 1645
1643 1646 log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
1644 1647 self.required_perms, user, check_scope, check_location)
1645 1648 if not user:
1646 1649 log.warning('Empty user given for permission check')
1647 1650 return False
1648 1651
1649 1652 if self.check_permissions(user):
1650 1653 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1651 1654 check_scope, user, check_location)
1652 1655 return True
1653 1656
1654 1657 else:
1655 1658 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1656 1659 check_scope, user, check_location)
1657 1660 return False
1658 1661
1659 1662 def _get_request(self):
1660 1663 return get_request(self)
1661 1664
1662 1665 def _get_check_scope(self, cls_name):
1663 1666 return {
1664 1667 'HasPermissionAll': 'GLOBAL',
1665 1668 'HasPermissionAny': 'GLOBAL',
1666 1669 'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
1667 1670 'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
1668 1671 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
1669 1672 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
1670 1673 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
1671 1674 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
1672 1675 }.get(cls_name, '?:%s' % cls_name)
1673 1676
1674 1677 def check_permissions(self, user):
1675 1678 """Dummy function for overriding"""
1676 1679 raise Exception('You have to write this function in child class')
1677 1680
1678 1681
1679 1682 class HasPermissionAll(PermsFunction):
1680 1683 def check_permissions(self, user):
1681 1684 perms = user.permissions_with_scope({})
1682 1685 if self.required_perms.issubset(perms.get('global')):
1683 1686 return True
1684 1687 return False
1685 1688
1686 1689
1687 1690 class HasPermissionAny(PermsFunction):
1688 1691 def check_permissions(self, user):
1689 1692 perms = user.permissions_with_scope({})
1690 1693 if self.required_perms.intersection(perms.get('global')):
1691 1694 return True
1692 1695 return False
1693 1696
1694 1697
1695 1698 class HasRepoPermissionAll(PermsFunction):
1696 1699 def __call__(self, repo_name=None, check_location='', user=None):
1697 1700 self.repo_name = repo_name
1698 1701 return super(HasRepoPermissionAll, self).__call__(check_location, user)
1699 1702
1700 1703 def _get_repo_name(self):
1701 1704 if not self.repo_name:
1702 1705 _request = self._get_request()
1703 1706 self.repo_name = get_repo_slug(_request)
1704 1707 return self.repo_name
1705 1708
1706 1709 def check_permissions(self, user):
1707 1710 self.repo_name = self._get_repo_name()
1708 1711 perms = user.permissions
1709 1712 try:
1710 1713 user_perms = set([perms['repositories'][self.repo_name]])
1711 1714 except KeyError:
1712 1715 return False
1713 1716 if self.required_perms.issubset(user_perms):
1714 1717 return True
1715 1718 return False
1716 1719
1717 1720
1718 1721 class HasRepoPermissionAny(PermsFunction):
1719 1722 def __call__(self, repo_name=None, check_location='', user=None):
1720 1723 self.repo_name = repo_name
1721 1724 return super(HasRepoPermissionAny, self).__call__(check_location, user)
1722 1725
1723 1726 def _get_repo_name(self):
1724 1727 if not self.repo_name:
1725 1728 _request = self._get_request()
1726 1729 self.repo_name = get_repo_slug(_request)
1727 1730 return self.repo_name
1728 1731
1729 1732 def check_permissions(self, user):
1730 1733 self.repo_name = self._get_repo_name()
1731 1734 perms = user.permissions
1732 1735 try:
1733 1736 user_perms = set([perms['repositories'][self.repo_name]])
1734 1737 except KeyError:
1735 1738 return False
1736 1739 if self.required_perms.intersection(user_perms):
1737 1740 return True
1738 1741 return False
1739 1742
1740 1743
1741 1744 class HasRepoGroupPermissionAny(PermsFunction):
1742 1745 def __call__(self, group_name=None, check_location='', user=None):
1743 1746 self.repo_group_name = group_name
1744 1747 return super(HasRepoGroupPermissionAny, self).__call__(
1745 1748 check_location, user)
1746 1749
1747 1750 def check_permissions(self, user):
1748 1751 perms = user.permissions
1749 1752 try:
1750 1753 user_perms = set(
1751 1754 [perms['repositories_groups'][self.repo_group_name]])
1752 1755 except KeyError:
1753 1756 return False
1754 1757 if self.required_perms.intersection(user_perms):
1755 1758 return True
1756 1759 return False
1757 1760
1758 1761
1759 1762 class HasRepoGroupPermissionAll(PermsFunction):
1760 1763 def __call__(self, group_name=None, check_location='', user=None):
1761 1764 self.repo_group_name = group_name
1762 1765 return super(HasRepoGroupPermissionAll, self).__call__(
1763 1766 check_location, user)
1764 1767
1765 1768 def check_permissions(self, user):
1766 1769 perms = user.permissions
1767 1770 try:
1768 1771 user_perms = set(
1769 1772 [perms['repositories_groups'][self.repo_group_name]])
1770 1773 except KeyError:
1771 1774 return False
1772 1775 if self.required_perms.issubset(user_perms):
1773 1776 return True
1774 1777 return False
1775 1778
1776 1779
1777 1780 class HasUserGroupPermissionAny(PermsFunction):
1778 1781 def __call__(self, user_group_name=None, check_location='', user=None):
1779 1782 self.user_group_name = user_group_name
1780 1783 return super(HasUserGroupPermissionAny, self).__call__(
1781 1784 check_location, user)
1782 1785
1783 1786 def check_permissions(self, user):
1784 1787 perms = user.permissions
1785 1788 try:
1786 1789 user_perms = set([perms['user_groups'][self.user_group_name]])
1787 1790 except KeyError:
1788 1791 return False
1789 1792 if self.required_perms.intersection(user_perms):
1790 1793 return True
1791 1794 return False
1792 1795
1793 1796
1794 1797 class HasUserGroupPermissionAll(PermsFunction):
1795 1798 def __call__(self, user_group_name=None, check_location='', user=None):
1796 1799 self.user_group_name = user_group_name
1797 1800 return super(HasUserGroupPermissionAll, self).__call__(
1798 1801 check_location, user)
1799 1802
1800 1803 def check_permissions(self, user):
1801 1804 perms = user.permissions
1802 1805 try:
1803 1806 user_perms = set([perms['user_groups'][self.user_group_name]])
1804 1807 except KeyError:
1805 1808 return False
1806 1809 if self.required_perms.issubset(user_perms):
1807 1810 return True
1808 1811 return False
1809 1812
1810 1813
1811 1814 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
1812 1815 class HasPermissionAnyMiddleware(object):
1813 1816 def __init__(self, *perms):
1814 1817 self.required_perms = set(perms)
1815 1818
1816 1819 def __call__(self, user, repo_name):
1817 1820 # repo_name MUST be unicode, since we handle keys in permission
1818 1821 # dict by unicode
1819 1822 repo_name = safe_unicode(repo_name)
1820 1823 user = AuthUser(user.user_id)
1821 1824 log.debug(
1822 1825 'Checking VCS protocol permissions %s for user:%s repo:`%s`',
1823 1826 self.required_perms, user, repo_name)
1824 1827
1825 1828 if self.check_permissions(user, repo_name):
1826 1829 log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
1827 1830 repo_name, user, 'PermissionMiddleware')
1828 1831 return True
1829 1832
1830 1833 else:
1831 1834 log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
1832 1835 repo_name, user, 'PermissionMiddleware')
1833 1836 return False
1834 1837
1835 1838 def check_permissions(self, user, repo_name):
1836 1839 perms = user.permissions_with_scope({'repo_name': repo_name})
1837 1840
1838 1841 try:
1839 1842 user_perms = set([perms['repositories'][repo_name]])
1840 1843 except Exception:
1841 1844 log.exception('Error while accessing user permissions')
1842 1845 return False
1843 1846
1844 1847 if self.required_perms.intersection(user_perms):
1845 1848 return True
1846 1849 return False
1847 1850
1848 1851
1849 1852 # SPECIAL VERSION TO HANDLE API AUTH
1850 1853 class _BaseApiPerm(object):
1851 1854 def __init__(self, *perms):
1852 1855 self.required_perms = set(perms)
1853 1856
1854 1857 def __call__(self, check_location=None, user=None, repo_name=None,
1855 1858 group_name=None, user_group_name=None):
1856 1859 cls_name = self.__class__.__name__
1857 1860 check_scope = 'global:%s' % (self.required_perms,)
1858 1861 if repo_name:
1859 1862 check_scope += ', repo_name:%s' % (repo_name,)
1860 1863
1861 1864 if group_name:
1862 1865 check_scope += ', repo_group_name:%s' % (group_name,)
1863 1866
1864 1867 if user_group_name:
1865 1868 check_scope += ', user_group_name:%s' % (user_group_name,)
1866 1869
1867 1870 log.debug(
1868 1871 'checking cls:%s %s %s @ %s'
1869 1872 % (cls_name, self.required_perms, check_scope, check_location))
1870 1873 if not user:
1871 1874 log.debug('Empty User passed into arguments')
1872 1875 return False
1873 1876
1874 1877 # process user
1875 1878 if not isinstance(user, AuthUser):
1876 1879 user = AuthUser(user.user_id)
1877 1880 if not check_location:
1878 1881 check_location = 'unspecified'
1879 1882 if self.check_permissions(user.permissions, repo_name, group_name,
1880 1883 user_group_name):
1881 1884 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1882 1885 check_scope, user, check_location)
1883 1886 return True
1884 1887
1885 1888 else:
1886 1889 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1887 1890 check_scope, user, check_location)
1888 1891 return False
1889 1892
1890 1893 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1891 1894 user_group_name=None):
1892 1895 """
1893 1896 implement in child class should return True if permissions are ok,
1894 1897 False otherwise
1895 1898
1896 1899 :param perm_defs: dict with permission definitions
1897 1900 :param repo_name: repo name
1898 1901 """
1899 1902 raise NotImplementedError()
1900 1903
1901 1904
1902 1905 class HasPermissionAllApi(_BaseApiPerm):
1903 1906 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1904 1907 user_group_name=None):
1905 1908 if self.required_perms.issubset(perm_defs.get('global')):
1906 1909 return True
1907 1910 return False
1908 1911
1909 1912
1910 1913 class HasPermissionAnyApi(_BaseApiPerm):
1911 1914 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1912 1915 user_group_name=None):
1913 1916 if self.required_perms.intersection(perm_defs.get('global')):
1914 1917 return True
1915 1918 return False
1916 1919
1917 1920
1918 1921 class HasRepoPermissionAllApi(_BaseApiPerm):
1919 1922 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1920 1923 user_group_name=None):
1921 1924 try:
1922 1925 _user_perms = set([perm_defs['repositories'][repo_name]])
1923 1926 except KeyError:
1924 1927 log.warning(traceback.format_exc())
1925 1928 return False
1926 1929 if self.required_perms.issubset(_user_perms):
1927 1930 return True
1928 1931 return False
1929 1932
1930 1933
1931 1934 class HasRepoPermissionAnyApi(_BaseApiPerm):
1932 1935 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1933 1936 user_group_name=None):
1934 1937 try:
1935 1938 _user_perms = set([perm_defs['repositories'][repo_name]])
1936 1939 except KeyError:
1937 1940 log.warning(traceback.format_exc())
1938 1941 return False
1939 1942 if self.required_perms.intersection(_user_perms):
1940 1943 return True
1941 1944 return False
1942 1945
1943 1946
1944 1947 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
1945 1948 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1946 1949 user_group_name=None):
1947 1950 try:
1948 1951 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1949 1952 except KeyError:
1950 1953 log.warning(traceback.format_exc())
1951 1954 return False
1952 1955 if self.required_perms.intersection(_user_perms):
1953 1956 return True
1954 1957 return False
1955 1958
1956 1959
1957 1960 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
1958 1961 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1959 1962 user_group_name=None):
1960 1963 try:
1961 1964 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1962 1965 except KeyError:
1963 1966 log.warning(traceback.format_exc())
1964 1967 return False
1965 1968 if self.required_perms.issubset(_user_perms):
1966 1969 return True
1967 1970 return False
1968 1971
1969 1972
1970 1973 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
1971 1974 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1972 1975 user_group_name=None):
1973 1976 try:
1974 1977 _user_perms = set([perm_defs['user_groups'][user_group_name]])
1975 1978 except KeyError:
1976 1979 log.warning(traceback.format_exc())
1977 1980 return False
1978 1981 if self.required_perms.intersection(_user_perms):
1979 1982 return True
1980 1983 return False
1981 1984
1982 1985
1983 1986 def check_ip_access(source_ip, allowed_ips=None):
1984 1987 """
1985 1988 Checks if source_ip is a subnet of any of allowed_ips.
1986 1989
1987 1990 :param source_ip:
1988 1991 :param allowed_ips: list of allowed ips together with mask
1989 1992 """
1990 1993 log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips))
1991 1994 source_ip_address = ipaddress.ip_address(source_ip)
1992 1995 if isinstance(allowed_ips, (tuple, list, set)):
1993 1996 for ip in allowed_ips:
1994 1997 try:
1995 1998 network_address = ipaddress.ip_network(ip, strict=False)
1996 1999 if source_ip_address in network_address:
1997 2000 log.debug('IP %s is network %s' %
1998 2001 (source_ip_address, network_address))
1999 2002 return True
2000 2003 # for any case we cannot determine the IP, don't crash just
2001 2004 # skip it and log as error, we want to say forbidden still when
2002 2005 # sending bad IP
2003 2006 except Exception:
2004 2007 log.error(traceback.format_exc())
2005 2008 continue
2006 2009 return False
2007 2010
2008 2011
2009 2012 def get_cython_compat_decorator(wrapper, func):
2010 2013 """
2011 2014 Creates a cython compatible decorator. The previously used
2012 2015 decorator.decorator() function seems to be incompatible with cython.
2013 2016
2014 2017 :param wrapper: __wrapper method of the decorator class
2015 2018 :param func: decorated function
2016 2019 """
2017 2020 @wraps(func)
2018 2021 def local_wrapper(*args, **kwds):
2019 2022 return wrapper(func, *args, **kwds)
2020 2023 local_wrapper.__wrapped__ = func
2021 2024 return local_wrapper
2022 2025
2023 2026
@@ -1,617 +1,631 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 The base Controller API
23 23 Provides the BaseController class for subclassing. And usage in different
24 24 controllers
25 25 """
26 26
27 27 import logging
28 28 import socket
29 29
30 30 import ipaddress
31 31 import pyramid.threadlocal
32 32
33 33 from paste.auth.basic import AuthBasicAuthenticator
34 34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
35 35 from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
36 from pylons import config, tmpl_context as c, request, session, url
36 from pylons import config, tmpl_context as c, request, url
37 37 from pylons.controllers import WSGIController
38 38 from pylons.controllers.util import redirect
39 39 from pylons.i18n import translation
40 40 # marcink: don't remove this import
41 41 from pylons.templating import render_mako as render # noqa
42 42 from pylons.i18n.translation import _
43 43 from webob.exc import HTTPFound
44 44
45 45
46 46 import rhodecode
47 47 from rhodecode.authentication.base import VCS_TYPE
48 48 from rhodecode.lib import auth, utils2
49 49 from rhodecode.lib import helpers as h
50 50 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
51 51 from rhodecode.lib.exceptions import UserCreationError
52 52 from rhodecode.lib.utils import (
53 53 get_repo_slug, set_rhodecode_config, password_changed,
54 54 get_enabled_hook_classes)
55 55 from rhodecode.lib.utils2 import (
56 56 str2bool, safe_unicode, AttributeDict, safe_int, md5, aslist)
57 57 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
58 58 from rhodecode.model import meta
59 59 from rhodecode.model.db import Repository, User, ChangesetComment
60 60 from rhodecode.model.notification import NotificationModel
61 61 from rhodecode.model.scm import ScmModel
62 62 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
63 63
64 64
65 65 log = logging.getLogger(__name__)
66 66
67 67
68 68 def _filter_proxy(ip):
69 69 """
70 70 Passed in IP addresses in HEADERS can be in a special format of multiple
71 71 ips. Those comma separated IPs are passed from various proxies in the
72 72 chain of request processing. The left-most being the original client.
73 73 We only care about the first IP which came from the org. client.
74 74
75 75 :param ip: ip string from headers
76 76 """
77 77 if ',' in ip:
78 78 _ips = ip.split(',')
79 79 _first_ip = _ips[0].strip()
80 80 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
81 81 return _first_ip
82 82 return ip
83 83
84 84
85 85 def _filter_port(ip):
86 86 """
87 87 Removes a port from ip, there are 4 main cases to handle here.
88 88 - ipv4 eg. 127.0.0.1
89 89 - ipv6 eg. ::1
90 90 - ipv4+port eg. 127.0.0.1:8080
91 91 - ipv6+port eg. [::1]:8080
92 92
93 93 :param ip:
94 94 """
95 95 def is_ipv6(ip_addr):
96 96 if hasattr(socket, 'inet_pton'):
97 97 try:
98 98 socket.inet_pton(socket.AF_INET6, ip_addr)
99 99 except socket.error:
100 100 return False
101 101 else:
102 102 # fallback to ipaddress
103 103 try:
104 104 ipaddress.IPv6Address(ip_addr)
105 105 except Exception:
106 106 return False
107 107 return True
108 108
109 109 if ':' not in ip: # must be ipv4 pure ip
110 110 return ip
111 111
112 112 if '[' in ip and ']' in ip: # ipv6 with port
113 113 return ip.split(']')[0][1:].lower()
114 114
115 115 # must be ipv6 or ipv4 with port
116 116 if is_ipv6(ip):
117 117 return ip
118 118 else:
119 119 ip, _port = ip.split(':')[:2] # means ipv4+port
120 120 return ip
121 121
122 122
123 123 def get_ip_addr(environ):
124 124 proxy_key = 'HTTP_X_REAL_IP'
125 125 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
126 126 def_key = 'REMOTE_ADDR'
127 127 _filters = lambda x: _filter_port(_filter_proxy(x))
128 128
129 129 ip = environ.get(proxy_key)
130 130 if ip:
131 131 return _filters(ip)
132 132
133 133 ip = environ.get(proxy_key2)
134 134 if ip:
135 135 return _filters(ip)
136 136
137 137 ip = environ.get(def_key, '0.0.0.0')
138 138 return _filters(ip)
139 139
140 140
141 141 def get_server_ip_addr(environ, log_errors=True):
142 142 hostname = environ.get('SERVER_NAME')
143 143 try:
144 144 return socket.gethostbyname(hostname)
145 145 except Exception as e:
146 146 if log_errors:
147 147 # in some cases this lookup is not possible, and we don't want to
148 148 # make it an exception in logs
149 149 log.exception('Could not retrieve server ip address: %s', e)
150 150 return hostname
151 151
152 152
153 153 def get_server_port(environ):
154 154 return environ.get('SERVER_PORT')
155 155
156 156
157 157 def get_access_path(environ):
158 158 path = environ.get('PATH_INFO')
159 159 org_req = environ.get('pylons.original_request')
160 160 if org_req:
161 161 path = org_req.environ.get('PATH_INFO')
162 162 return path
163 163
164 164
165 165 def get_user_agent(environ):
166 166 return environ.get('HTTP_USER_AGENT')
167 167
168 168
169 169 def vcs_operation_context(
170 170 environ, repo_name, username, action, scm, check_locking=True,
171 171 is_shadow_repo=False):
172 172 """
173 173 Generate the context for a vcs operation, e.g. push or pull.
174 174
175 175 This context is passed over the layers so that hooks triggered by the
176 176 vcs operation know details like the user, the user's IP address etc.
177 177
178 178 :param check_locking: Allows to switch of the computation of the locking
179 179 data. This serves mainly the need of the simplevcs middleware to be
180 180 able to disable this for certain operations.
181 181
182 182 """
183 183 # Tri-state value: False: unlock, None: nothing, True: lock
184 184 make_lock = None
185 185 locked_by = [None, None, None]
186 186 is_anonymous = username == User.DEFAULT_USER
187 187 if not is_anonymous and check_locking:
188 188 log.debug('Checking locking on repository "%s"', repo_name)
189 189 user = User.get_by_username(username)
190 190 repo = Repository.get_by_repo_name(repo_name)
191 191 make_lock, __, locked_by = repo.get_locking_state(
192 192 action, user.user_id)
193 193
194 194 settings_model = VcsSettingsModel(repo=repo_name)
195 195 ui_settings = settings_model.get_ui_settings()
196 196
197 197 extras = {
198 198 'ip': get_ip_addr(environ),
199 199 'username': username,
200 200 'action': action,
201 201 'repository': repo_name,
202 202 'scm': scm,
203 203 'config': rhodecode.CONFIG['__file__'],
204 204 'make_lock': make_lock,
205 205 'locked_by': locked_by,
206 206 'server_url': utils2.get_server_url(environ),
207 207 'user_agent': get_user_agent(environ),
208 208 'hooks': get_enabled_hook_classes(ui_settings),
209 209 'is_shadow_repo': is_shadow_repo,
210 210 }
211 211 return extras
212 212
213 213
214 214 class BasicAuth(AuthBasicAuthenticator):
215 215
216 216 def __init__(self, realm, authfunc, registry, auth_http_code=None,
217 217 initial_call_detection=False, acl_repo_name=None):
218 218 self.realm = realm
219 219 self.initial_call = initial_call_detection
220 220 self.authfunc = authfunc
221 221 self.registry = registry
222 222 self.acl_repo_name = acl_repo_name
223 223 self._rc_auth_http_code = auth_http_code
224 224
225 225 def _get_response_from_code(self, http_code):
226 226 try:
227 227 return get_exception(safe_int(http_code))
228 228 except Exception:
229 229 log.exception('Failed to fetch response for code %s' % http_code)
230 230 return HTTPForbidden
231 231
232 232 def build_authentication(self):
233 233 head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
234 234 if self._rc_auth_http_code and not self.initial_call:
235 235 # return alternative HTTP code if alternative http return code
236 236 # is specified in RhodeCode config, but ONLY if it's not the
237 237 # FIRST call
238 238 custom_response_klass = self._get_response_from_code(
239 239 self._rc_auth_http_code)
240 240 return custom_response_klass(headers=head)
241 241 return HTTPUnauthorized(headers=head)
242 242
243 243 def authenticate(self, environ):
244 244 authorization = AUTHORIZATION(environ)
245 245 if not authorization:
246 246 return self.build_authentication()
247 247 (authmeth, auth) = authorization.split(' ', 1)
248 248 if 'basic' != authmeth.lower():
249 249 return self.build_authentication()
250 250 auth = auth.strip().decode('base64')
251 251 _parts = auth.split(':', 1)
252 252 if len(_parts) == 2:
253 253 username, password = _parts
254 254 if self.authfunc(
255 255 username, password, environ, VCS_TYPE,
256 256 registry=self.registry, acl_repo_name=self.acl_repo_name):
257 257 return username
258 258 if username and password:
259 259 # we mark that we actually executed authentication once, at
260 260 # that point we can use the alternative auth code
261 261 self.initial_call = False
262 262
263 263 return self.build_authentication()
264 264
265 265 __call__ = authenticate
266 266
267 267
268 268 def calculate_version_hash():
269 269 return md5(
270 270 config.get('beaker.session.secret', '') +
271 271 rhodecode.__version__)[:8]
272 272
273 273
274 274 def get_current_lang(request):
275 275 # NOTE(marcink): remove after pyramid move
276 276 try:
277 277 return translation.get_lang()[0]
278 278 except:
279 279 pass
280 280
281 281 return getattr(request, '_LOCALE_', None)
282 282
283 283
284 284 def attach_context_attributes(context, request, user_id):
285 285 """
286 286 Attach variables into template context called `c`, please note that
287 287 request could be pylons or pyramid request in here.
288 288 """
289 289 rc_config = SettingsModel().get_all_settings(cache=True)
290 290
291 291 context.rhodecode_version = rhodecode.__version__
292 292 context.rhodecode_edition = config.get('rhodecode.edition')
293 293 # unique secret + version does not leak the version but keep consistency
294 294 context.rhodecode_version_hash = calculate_version_hash()
295 295
296 296 # Default language set for the incoming request
297 297 context.language = get_current_lang(request)
298 298
299 299 # Visual options
300 300 context.visual = AttributeDict({})
301 301
302 302 # DB stored Visual Items
303 303 context.visual.show_public_icon = str2bool(
304 304 rc_config.get('rhodecode_show_public_icon'))
305 305 context.visual.show_private_icon = str2bool(
306 306 rc_config.get('rhodecode_show_private_icon'))
307 307 context.visual.stylify_metatags = str2bool(
308 308 rc_config.get('rhodecode_stylify_metatags'))
309 309 context.visual.dashboard_items = safe_int(
310 310 rc_config.get('rhodecode_dashboard_items', 100))
311 311 context.visual.admin_grid_items = safe_int(
312 312 rc_config.get('rhodecode_admin_grid_items', 100))
313 313 context.visual.repository_fields = str2bool(
314 314 rc_config.get('rhodecode_repository_fields'))
315 315 context.visual.show_version = str2bool(
316 316 rc_config.get('rhodecode_show_version'))
317 317 context.visual.use_gravatar = str2bool(
318 318 rc_config.get('rhodecode_use_gravatar'))
319 319 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
320 320 context.visual.default_renderer = rc_config.get(
321 321 'rhodecode_markup_renderer', 'rst')
322 322 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
323 323 context.visual.rhodecode_support_url = \
324 324 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
325 325
326 326 context.pre_code = rc_config.get('rhodecode_pre_code')
327 327 context.post_code = rc_config.get('rhodecode_post_code')
328 328 context.rhodecode_name = rc_config.get('rhodecode_title')
329 329 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
330 330 # if we have specified default_encoding in the request, it has more
331 331 # priority
332 332 if request.GET.get('default_encoding'):
333 333 context.default_encodings.insert(0, request.GET.get('default_encoding'))
334 334 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
335 335
336 336 # INI stored
337 337 context.labs_active = str2bool(
338 338 config.get('labs_settings_active', 'false'))
339 339 context.visual.allow_repo_location_change = str2bool(
340 340 config.get('allow_repo_location_change', True))
341 341 context.visual.allow_custom_hooks_settings = str2bool(
342 342 config.get('allow_custom_hooks_settings', True))
343 343 context.debug_style = str2bool(config.get('debug_style', False))
344 344
345 345 context.rhodecode_instanceid = config.get('instance_id')
346 346
347 347 context.visual.cut_off_limit_diff = safe_int(
348 348 config.get('cut_off_limit_diff'))
349 349 context.visual.cut_off_limit_file = safe_int(
350 350 config.get('cut_off_limit_file'))
351 351
352 352 # AppEnlight
353 353 context.appenlight_enabled = str2bool(config.get('appenlight', 'false'))
354 354 context.appenlight_api_public_key = config.get(
355 355 'appenlight.api_public_key', '')
356 356 context.appenlight_server_url = config.get('appenlight.server_url', '')
357 357
358 358 # JS template context
359 359 context.template_context = {
360 360 'repo_name': None,
361 361 'repo_type': None,
362 362 'repo_landing_commit': None,
363 363 'rhodecode_user': {
364 364 'username': None,
365 365 'email': None,
366 366 'notification_status': False
367 367 },
368 368 'visual': {
369 369 'default_renderer': None
370 370 },
371 371 'commit_data': {
372 372 'commit_id': None
373 373 },
374 374 'pull_request_data': {'pull_request_id': None},
375 375 'timeago': {
376 376 'refresh_time': 120 * 1000,
377 377 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
378 378 },
379 379 'pylons_dispatch': {
380 380 # 'controller': request.environ['pylons.routes_dict']['controller'],
381 381 # 'action': request.environ['pylons.routes_dict']['action'],
382 382 },
383 383 'pyramid_dispatch': {
384 384
385 385 },
386 386 'extra': {'plugins': {}}
387 387 }
388 388 # END CONFIG VARS
389 389
390 390 # TODO: This dosn't work when called from pylons compatibility tween.
391 391 # Fix this and remove it from base controller.
392 392 # context.repo_name = get_repo_slug(request) # can be empty
393 393
394 394 diffmode = 'sideside'
395 395 if request.GET.get('diffmode'):
396 396 if request.GET['diffmode'] == 'unified':
397 397 diffmode = 'unified'
398 398 elif request.session.get('diffmode'):
399 399 diffmode = request.session['diffmode']
400 400
401 401 context.diffmode = diffmode
402 402
403 403 if request.session.get('diffmode') != diffmode:
404 404 request.session['diffmode'] = diffmode
405 405
406 context.csrf_token = auth.get_csrf_token()
406 context.csrf_token = auth.get_csrf_token(session=request.session)
407 407 context.backends = rhodecode.BACKENDS.keys()
408 408 context.backends.sort()
409 409 context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id)
410 context.pyramid_request = pyramid.threadlocal.get_current_request()
410
411 # NOTE(marcink): when migrated to pyramid we don't need to set this anymore,
412 # given request will ALWAYS be pyramid one
413 pyramid_request = pyramid.threadlocal.get_current_request()
414 context.pyramid_request = pyramid_request
415
416 # web case
417 if hasattr(pyramid_request, 'user'):
418 context.auth_user = pyramid_request.user
419 context.rhodecode_user = pyramid_request.user
420
421 # api case
422 if hasattr(pyramid_request, 'rpc_user'):
423 context.auth_user = pyramid_request.rpc_user
424 context.rhodecode_user = pyramid_request.rpc_user
411 425
412 426 # attach the whole call context to the request
413 427 request.call_context = context
414 428
415 429
416 430 def get_auth_user(request):
417 431 environ = request.environ
418 432 session = request.session
419 433
420 434 ip_addr = get_ip_addr(environ)
421 435 # make sure that we update permissions each time we call controller
422 436 _auth_token = (request.GET.get('auth_token', '') or
423 437 request.GET.get('api_key', ''))
424 438
425 439 if _auth_token:
426 440 # when using API_KEY we assume user exists, and
427 441 # doesn't need auth based on cookies.
428 442 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
429 443 authenticated = False
430 444 else:
431 445 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
432 446 try:
433 447 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
434 448 ip_addr=ip_addr)
435 449 except UserCreationError as e:
436 450 h.flash(e, 'error')
437 451 # container auth or other auth functions that create users
438 452 # on the fly can throw this exception signaling that there's
439 453 # issue with user creation, explanation should be provided
440 454 # in Exception itself. We then create a simple blank
441 455 # AuthUser
442 456 auth_user = AuthUser(ip_addr=ip_addr)
443 457
444 458 if password_changed(auth_user, session):
445 459 session.invalidate()
446 460 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
447 461 auth_user = AuthUser(ip_addr=ip_addr)
448 462
449 463 authenticated = cookie_store.get('is_authenticated')
450 464
451 465 if not auth_user.is_authenticated and auth_user.is_user_object:
452 466 # user is not authenticated and not empty
453 467 auth_user.set_authenticated(authenticated)
454 468
455 469 return auth_user
456 470
457 471
458 472 class BaseController(WSGIController):
459 473
460 474 def __before__(self):
461 475 """
462 476 __before__ is called before controller methods and after __call__
463 477 """
464 478 # on each call propagate settings calls into global settings.
465 479 set_rhodecode_config(config)
466 attach_context_attributes(c, request, c.rhodecode_user.user_id)
480 attach_context_attributes(c, request, self._rhodecode_user.user_id)
467 481
468 482 # TODO: Remove this when fixed in attach_context_attributes()
469 483 c.repo_name = get_repo_slug(request) # can be empty
470 484
471 485 self.cut_off_limit_diff = safe_int(config.get('cut_off_limit_diff'))
472 486 self.cut_off_limit_file = safe_int(config.get('cut_off_limit_file'))
473 487 self.sa = meta.Session
474 488 self.scm_model = ScmModel(self.sa)
475 489
476 490 # set user language
477 491 user_lang = getattr(c.pyramid_request, '_LOCALE_', None)
478 492 if user_lang:
479 493 translation.set_lang(user_lang)
480 494 log.debug('set language to %s for user %s',
481 495 user_lang, self._rhodecode_user)
482 496
483 497 def _dispatch_redirect(self, with_url, environ, start_response):
484 498 resp = HTTPFound(with_url)
485 499 environ['SCRIPT_NAME'] = '' # handle prefix middleware
486 500 environ['PATH_INFO'] = with_url
487 501 return resp(environ, start_response)
488 502
489 503 def __call__(self, environ, start_response):
490 504 """Invoke the Controller"""
491 505 # WSGIController.__call__ dispatches to the Controller method
492 506 # the request is routed to. This routing information is
493 507 # available in environ['pylons.routes_dict']
494 508 from rhodecode.lib import helpers as h
495 509
496 510 # Provide the Pylons context to Pyramid's debugtoolbar if it asks
497 511 if environ.get('debugtoolbar.wants_pylons_context', False):
498 512 environ['debugtoolbar.pylons_context'] = c._current_obj()
499 513
500 514 _route_name = '.'.join([environ['pylons.routes_dict']['controller'],
501 515 environ['pylons.routes_dict']['action']])
502 516
503 517 self.rc_config = SettingsModel().get_all_settings(cache=True)
504 518 self.ip_addr = get_ip_addr(environ)
505 519
506 520 # The rhodecode auth user is looked up and passed through the
507 521 # environ by the pylons compatibility tween in pyramid.
508 522 # So we can just grab it from there.
509 523 auth_user = environ['rc_auth_user']
510 524
511 525 # set globals for auth user
512 526 request.user = auth_user
513 c.rhodecode_user = self._rhodecode_user = auth_user
527 self._rhodecode_user = auth_user
514 528
515 529 log.info('IP: %s User: %s accessed %s [%s]' % (
516 530 self.ip_addr, auth_user, safe_unicode(get_access_path(environ)),
517 531 _route_name)
518 532 )
519 533
520 534 user_obj = auth_user.get_instance()
521 535 if user_obj and user_obj.user_data.get('force_password_change'):
522 536 h.flash('You are required to change your password', 'warning',
523 537 ignore_duplicate=True)
524 538 return self._dispatch_redirect(
525 539 url('my_account_password'), environ, start_response)
526 540
527 541 return WSGIController.__call__(self, environ, start_response)
528 542
529 543
530 544 class BaseRepoController(BaseController):
531 545 """
532 546 Base class for controllers responsible for loading all needed data for
533 547 repository loaded items are
534 548
535 549 c.rhodecode_repo: instance of scm repository
536 550 c.rhodecode_db_repo: instance of db
537 551 c.repository_requirements_missing: shows that repository specific data
538 552 could not be displayed due to the missing requirements
539 553 c.repository_pull_requests: show number of open pull requests
540 554 """
541 555
542 556 def __before__(self):
543 557 super(BaseRepoController, self).__before__()
544 558 if c.repo_name: # extracted from routes
545 559 db_repo = Repository.get_by_repo_name(c.repo_name)
546 560 if not db_repo:
547 561 return
548 562
549 563 log.debug(
550 564 'Found repository in database %s with state `%s`',
551 565 safe_unicode(db_repo), safe_unicode(db_repo.repo_state))
552 566 route = getattr(request.environ.get('routes.route'), 'name', '')
553 567
554 568 # allow to delete repos that are somehow damages in filesystem
555 569 if route in ['delete_repo']:
556 570 return
557 571
558 572 if db_repo.repo_state in [Repository.STATE_PENDING]:
559 573 if route in ['repo_creating_home']:
560 574 return
561 575 check_url = url('repo_creating_home', repo_name=c.repo_name)
562 576 return redirect(check_url)
563 577
564 578 self.rhodecode_db_repo = db_repo
565 579
566 580 missing_requirements = False
567 581 try:
568 582 self.rhodecode_repo = self.rhodecode_db_repo.scm_instance()
569 583 except RepositoryRequirementError as e:
570 584 missing_requirements = True
571 585 self._handle_missing_requirements(e)
572 586
573 587 if self.rhodecode_repo is None and not missing_requirements:
574 588 log.error('%s this repository is present in database but it '
575 589 'cannot be created as an scm instance', c.repo_name)
576 590
577 591 h.flash(_(
578 592 "The repository at %(repo_name)s cannot be located.") %
579 593 {'repo_name': c.repo_name},
580 594 category='error', ignore_duplicate=True)
581 595 redirect(h.route_path('home'))
582 596
583 597 # update last change according to VCS data
584 598 if not missing_requirements:
585 599 commit = db_repo.get_commit(
586 600 pre_load=["author", "date", "message", "parents"])
587 601 db_repo.update_commit_cache(commit)
588 602
589 603 # Prepare context
590 604 c.rhodecode_db_repo = db_repo
591 605 c.rhodecode_repo = self.rhodecode_repo
592 606 c.repository_requirements_missing = missing_requirements
593 607
594 608 self._update_global_counters(self.scm_model, db_repo)
595 609
596 610 def _update_global_counters(self, scm_model, db_repo):
597 611 """
598 612 Base variables that are exposed to every page of repository
599 613 """
600 614 c.repository_pull_requests = scm_model.get_pull_requests(db_repo)
601 615
602 616 def _handle_missing_requirements(self, error):
603 617 self.rhodecode_repo = None
604 618 log.error(
605 619 'Requirements are missing for repository %s: %s',
606 620 c.repo_name, error.message)
607 621
608 622 summary_url = h.route_path('repo_summary', repo_name=c.repo_name)
609 623 statistics_url = url('edit_repo_statistics', repo_name=c.repo_name)
610 624 settings_update_url = url('repo', repo_name=c.repo_name)
611 625 path = request.path
612 626 should_redirect = (
613 627 path not in (summary_url, settings_update_url)
614 628 and '/settings' not in path or path == statistics_url
615 629 )
616 630 if should_redirect:
617 631 redirect(summary_url)
@@ -1,329 +1,331 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import io
21 21 import re
22 22 import datetime
23 23 import logging
24 24 import pylons
25 25 import Queue
26 26 import subprocess32
27 27 import os
28 28
29 29 from pyramid.i18n import get_localizer
30 30 from pyramid.threadlocal import get_current_request
31 31 from pyramid.interfaces import IRoutesMapper
32 32 from pyramid.settings import asbool
33 33 from pyramid.path import AssetResolver
34 34 from threading import Thread
35 35
36 36 from rhodecode.translation import _ as tsf
37 37 from rhodecode.config.jsroutes import generate_jsroutes_content
38 38
39 39 import rhodecode
40 40
41 41 from pylons.i18n.translation import _get_translator
42 42 from pylons.util import ContextObj
43 43 from routes.util import URLGenerator
44 44
45 45 from rhodecode.lib.base import attach_context_attributes, get_auth_user
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 def add_renderer_globals(event):
51 from rhodecode.lib import helpers
52
53 # NOTE(marcink):
51 54 # Put pylons stuff into the context. This will be removed as soon as
52 55 # migration to pyramid is finished.
53 conf = pylons.config._current_obj()
54 event['h'] = conf.get('pylons.h')
55 56 event['c'] = pylons.tmpl_context
56 57 event['url'] = pylons.url
57 58
58 59 # TODO: When executed in pyramid view context the request is not available
59 60 # in the event. Find a better solution to get the request.
60 61 request = event['request'] or get_current_request()
61 62
62 63 # Add Pyramid translation as '_' to context
63 64 event['_'] = request.translate
64 65 event['_ungettext'] = request.plularize
66 event['h'] = helpers
65 67
66 68
67 69 def add_localizer(event):
68 70 request = event.request
69 71 localizer = get_localizer(request)
70 72
71 73 def auto_translate(*args, **kwargs):
72 74 return localizer.translate(tsf(*args, **kwargs))
73 75
74 76 request.localizer = localizer
75 77 request.translate = auto_translate
76 78 request.plularize = localizer.pluralize
77 79
78 80
79 81 def set_user_lang(event):
80 82 request = event.request
81 83 cur_user = getattr(request, 'user', None)
82 84
83 85 if cur_user:
84 86 user_lang = cur_user.get_instance().user_data.get('language')
85 87 if user_lang:
86 88 log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang)
87 89 event.request._LOCALE_ = user_lang
88 90
89 91
90 92 def add_request_user_context(event):
91 93 """
92 94 Adds auth user into request context
93 95 """
94 96 request = event.request
95 97
96 98 if hasattr(request, 'vcs_call'):
97 99 # skip vcs calls
98 100 return
99 101
100 102 if hasattr(request, 'rpc_method'):
101 103 # skip api calls
102 104 return
103 105
104 106 auth_user = get_auth_user(request)
105 107 request.user = auth_user
106 108 request.environ['rc_auth_user'] = auth_user
107 109
108 110
109 111 def add_pylons_context(event):
110 112 request = event.request
111 113
112 114 config = rhodecode.CONFIG
113 115 environ = request.environ
114 116 session = request.session
115 117
116 118 if hasattr(request, 'vcs_call'):
117 119 # skip vcs calls
118 120 return
119 121
120 122 # Setup pylons globals.
121 123 pylons.config._push_object(config)
122 124 pylons.request._push_object(request)
123 125 pylons.session._push_object(session)
124 126 pylons.translator._push_object(_get_translator(config.get('lang')))
125 127
126 128 pylons.url._push_object(URLGenerator(config['routes.map'], environ))
127 129 session_key = (
128 130 config['pylons.environ_config'].get('session', 'beaker.session'))
129 131 environ[session_key] = session
130 132
131 133 if hasattr(request, 'rpc_method'):
132 134 # skip api calls
133 135 return
134 136
135 137 # Setup the pylons context object ('c')
136 138 context = ContextObj()
137 139 context.rhodecode_user = request.user
138 140 attach_context_attributes(context, request, request.user.user_id)
139 141 pylons.tmpl_context._push_object(context)
140 142
141 143
142 144 def scan_repositories_if_enabled(event):
143 145 """
144 146 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
145 147 does a repository scan if enabled in the settings.
146 148 """
147 149 settings = event.app.registry.settings
148 150 vcs_server_enabled = settings['vcs.server.enable']
149 151 import_on_startup = settings['startup.import_repos']
150 152 if vcs_server_enabled and import_on_startup:
151 153 from rhodecode.model.scm import ScmModel
152 154 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_base_path
153 155 repositories = ScmModel().repo_scan(get_rhodecode_base_path())
154 156 repo2db_mapper(repositories, remove_obsolete=False)
155 157
156 158
157 159 def write_metadata_if_needed(event):
158 160 """
159 161 Writes upgrade metadata
160 162 """
161 163 import rhodecode
162 164 from rhodecode.lib import system_info
163 165 from rhodecode.lib import ext_json
164 166
165 167 def write():
166 168 fname = '.rcmetadata.json'
167 169 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
168 170 metadata_destination = os.path.join(ini_loc, fname)
169 171
170 172 configuration = system_info.SysInfo(
171 173 system_info.rhodecode_config)()['value']
172 174 license_token = configuration['config']['license_token']
173 175 dbinfo = system_info.SysInfo(system_info.database_info)()['value']
174 176 del dbinfo['url']
175 177 metadata = dict(
176 178 desc='upgrade metadata info',
177 179 license_token=license_token,
178 180 created_on=datetime.datetime.utcnow().isoformat(),
179 181 usage=system_info.SysInfo(system_info.usage_info)()['value'],
180 182 platform=system_info.SysInfo(system_info.platform_type)()['value'],
181 183 database=dbinfo,
182 184 cpu=system_info.SysInfo(system_info.cpu)()['value'],
183 185 memory=system_info.SysInfo(system_info.memory)()['value'],
184 186 )
185 187
186 188 with open(metadata_destination, 'wb') as f:
187 189 f.write(ext_json.json.dumps(metadata))
188 190
189 191 settings = event.app.registry.settings
190 192 if settings.get('metadata.skip'):
191 193 return
192 194
193 195 try:
194 196 write()
195 197 except Exception:
196 198 pass
197 199
198 200
199 201 def write_js_routes_if_enabled(event):
200 202 registry = event.app.registry
201 203
202 204 mapper = registry.queryUtility(IRoutesMapper)
203 205 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
204 206
205 207 def _extract_route_information(route):
206 208 """
207 209 Convert a route into tuple(name, path, args), eg:
208 210 ('show_user', '/profile/%(username)s', ['username'])
209 211 """
210 212
211 213 routepath = route.pattern
212 214 pattern = route.pattern
213 215
214 216 def replace(matchobj):
215 217 if matchobj.group(1):
216 218 return "%%(%s)s" % matchobj.group(1).split(':')[0]
217 219 else:
218 220 return "%%(%s)s" % matchobj.group(2)
219 221
220 222 routepath = _argument_prog.sub(replace, routepath)
221 223
222 224 if not routepath.startswith('/'):
223 225 routepath = '/'+routepath
224 226
225 227 return (
226 228 route.name,
227 229 routepath,
228 230 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
229 231 for arg in _argument_prog.findall(pattern)]
230 232 )
231 233
232 234 def get_routes():
233 235 # pylons routes
234 236 for route in rhodecode.CONFIG['routes.map'].jsroutes():
235 237 yield route
236 238
237 239 # pyramid routes
238 240 for route in mapper.get_routes():
239 241 if not route.name.startswith('__'):
240 242 yield _extract_route_information(route)
241 243
242 244 if asbool(registry.settings.get('generate_js_files', 'false')):
243 245 static_path = AssetResolver().resolve('rhodecode:public').abspath()
244 246 jsroutes = get_routes()
245 247 jsroutes_file_content = generate_jsroutes_content(jsroutes)
246 248 jsroutes_file_path = os.path.join(
247 249 static_path, 'js', 'rhodecode', 'routes.js')
248 250
249 251 with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f:
250 252 f.write(jsroutes_file_content)
251 253
252 254
253 255 class Subscriber(object):
254 256 """
255 257 Base class for subscribers to the pyramid event system.
256 258 """
257 259 def __call__(self, event):
258 260 self.run(event)
259 261
260 262 def run(self, event):
261 263 raise NotImplementedError('Subclass has to implement this.')
262 264
263 265
264 266 class AsyncSubscriber(Subscriber):
265 267 """
266 268 Subscriber that handles the execution of events in a separate task to not
267 269 block the execution of the code which triggers the event. It puts the
268 270 received events into a queue from which the worker process takes them in
269 271 order.
270 272 """
271 273 def __init__(self):
272 274 self._stop = False
273 275 self._eventq = Queue.Queue()
274 276 self._worker = self.create_worker()
275 277 self._worker.start()
276 278
277 279 def __call__(self, event):
278 280 self._eventq.put(event)
279 281
280 282 def create_worker(self):
281 283 worker = Thread(target=self.do_work)
282 284 worker.daemon = True
283 285 return worker
284 286
285 287 def stop_worker(self):
286 288 self._stop = False
287 289 self._eventq.put(None)
288 290 self._worker.join()
289 291
290 292 def do_work(self):
291 293 while not self._stop:
292 294 event = self._eventq.get()
293 295 if event is not None:
294 296 self.run(event)
295 297
296 298
297 299 class AsyncSubprocessSubscriber(AsyncSubscriber):
298 300 """
299 301 Subscriber that uses the subprocess32 module to execute a command if an
300 302 event is received. Events are handled asynchronously.
301 303 """
302 304
303 305 def __init__(self, cmd, timeout=None):
304 306 super(AsyncSubprocessSubscriber, self).__init__()
305 307 self._cmd = cmd
306 308 self._timeout = timeout
307 309
308 310 def run(self, event):
309 311 cmd = self._cmd
310 312 timeout = self._timeout
311 313 log.debug('Executing command %s.', cmd)
312 314
313 315 try:
314 316 output = subprocess32.check_output(
315 317 cmd, timeout=timeout, stderr=subprocess32.STDOUT)
316 318 log.debug('Command finished %s', cmd)
317 319 if output:
318 320 log.debug('Command output: %s', output)
319 321 except subprocess32.TimeoutExpired as e:
320 322 log.exception('Timeout while executing command.')
321 323 if e.output:
322 324 log.error('Command output: %s', e.output)
323 325 except subprocess32.CalledProcessError as e:
324 326 log.exception('Error while executing command.')
325 327 if e.output:
326 328 log.error('Command output: %s', e.output)
327 329 except:
328 330 log.exception(
329 331 'Exception while executing command %s.', cmd)
@@ -1,987 +1,987 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.controllers.files import FilesController
27 27 from rhodecode.lib import helpers as h
28 28 from rhodecode.lib.compat import OrderedDict
29 29 from rhodecode.lib.ext_json import json
30 30 from rhodecode.lib.vcs import nodes
31 31
32 32 from rhodecode.lib.vcs.conf import settings
33 33 from rhodecode.tests import (
34 34 url, assert_session_flash, assert_not_in_session_flash)
35 35 from rhodecode.tests.fixture import Fixture
36 36
37 37 fixture = Fixture()
38 38
39 39 NODE_HISTORY = {
40 40 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
41 41 'git': json.loads(fixture.load_resource('git_node_history_response.json')),
42 42 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
43 43 }
44 44
45 45
46 46
47 47 @pytest.mark.usefixtures("app")
48 48 class TestFilesController:
49 49
50 50 def test_index(self, backend):
51 51 response = self.app.get(url(
52 52 controller='files', action='index',
53 53 repo_name=backend.repo_name, revision='tip', f_path='/'))
54 54 commit = backend.repo.get_commit()
55 55
56 56 params = {
57 57 'repo_name': backend.repo_name,
58 58 'commit_id': commit.raw_id,
59 59 'date': commit.date
60 60 }
61 61 assert_dirs_in_response(response, ['docs', 'vcs'], params)
62 62 files = [
63 63 '.gitignore',
64 64 '.hgignore',
65 65 '.hgtags',
66 66 # TODO: missing in Git
67 67 # '.travis.yml',
68 68 'MANIFEST.in',
69 69 'README.rst',
70 70 # TODO: File is missing in svn repository
71 71 # 'run_test_and_report.sh',
72 72 'setup.cfg',
73 73 'setup.py',
74 74 'test_and_report.sh',
75 75 'tox.ini',
76 76 ]
77 77 assert_files_in_response(response, files, params)
78 78 assert_timeago_in_response(response, files, params)
79 79
80 80 def test_index_links_submodules_with_absolute_url(self, backend_hg):
81 81 repo = backend_hg['subrepos']
82 82 response = self.app.get(url(
83 83 controller='files', action='index',
84 84 repo_name=repo.repo_name, revision='tip', f_path='/'))
85 85 assert_response = response.assert_response()
86 86 assert_response.contains_one_link(
87 87 'absolute-path @ 000000000000', 'http://example.com/absolute-path')
88 88
89 89 def test_index_links_submodules_with_absolute_url_subpaths(
90 90 self, backend_hg):
91 91 repo = backend_hg['subrepos']
92 92 response = self.app.get(url(
93 93 controller='files', action='index',
94 94 repo_name=repo.repo_name, revision='tip', f_path='/'))
95 95 assert_response = response.assert_response()
96 96 assert_response.contains_one_link(
97 97 'subpaths-path @ 000000000000',
98 98 'http://sub-base.example.com/subpaths-path')
99 99
100 100 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
101 101 def test_files_menu(self, backend):
102 102 new_branch = "temp_branch_name"
103 103 commits = [
104 104 {'message': 'a'},
105 105 {'message': 'b', 'branch': new_branch}
106 106 ]
107 107 backend.create_repo(commits)
108 108
109 109 backend.repo.landing_rev = "branch:%s" % new_branch
110 110
111 111 # get response based on tip and not new revision
112 112 response = self.app.get(url(
113 113 controller='files', action='index',
114 114 repo_name=backend.repo_name, revision='tip', f_path='/'),
115 115 status=200)
116 116
117 117 # make sure Files menu url is not tip but new revision
118 118 landing_rev = backend.repo.landing_rev[1]
119 119 files_url = url('files_home', repo_name=backend.repo_name,
120 120 revision=landing_rev)
121 121
122 122 assert landing_rev != 'tip'
123 123 response.mustcontain('<li class="active"><a class="menulink" href="%s">' % files_url)
124 124
125 125 def test_index_commit(self, backend):
126 126 commit = backend.repo.get_commit(commit_idx=32)
127 127
128 128 response = self.app.get(url(
129 129 controller='files', action='index',
130 130 repo_name=backend.repo_name,
131 131 revision=commit.raw_id,
132 132 f_path='/')
133 133 )
134 134
135 135 dirs = ['docs', 'tests']
136 136 files = ['README.rst']
137 137 params = {
138 138 'repo_name': backend.repo_name,
139 139 'commit_id': commit.raw_id,
140 140 }
141 141 assert_dirs_in_response(response, dirs, params)
142 142 assert_files_in_response(response, files, params)
143 143
144 144 def test_index_different_branch(self, backend):
145 145 branches = dict(
146 146 hg=(150, ['git']),
147 147 # TODO: Git test repository does not contain other branches
148 148 git=(633, ['master']),
149 149 # TODO: Branch support in Subversion
150 150 svn=(150, [])
151 151 )
152 152 idx, branches = branches[backend.alias]
153 153 commit = backend.repo.get_commit(commit_idx=idx)
154 154 response = self.app.get(url(
155 155 controller='files', action='index',
156 156 repo_name=backend.repo_name,
157 157 revision=commit.raw_id,
158 158 f_path='/'))
159 159 assert_response = response.assert_response()
160 160 for branch in branches:
161 161 assert_response.element_contains('.tags .branchtag', branch)
162 162
163 163 def test_index_paging(self, backend):
164 164 repo = backend.repo
165 165 indexes = [73, 92, 109, 1, 0]
166 166 idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
167 167 for rev in indexes]
168 168
169 169 for idx in idx_map:
170 170 response = self.app.get(url(
171 171 controller='files', action='index',
172 172 repo_name=backend.repo_name,
173 173 revision=idx[1],
174 174 f_path='/'))
175 175
176 176 response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))
177 177
178 178 def test_file_source(self, backend):
179 179 commit = backend.repo.get_commit(commit_idx=167)
180 180 response = self.app.get(url(
181 181 controller='files', action='index',
182 182 repo_name=backend.repo_name,
183 183 revision=commit.raw_id,
184 184 f_path='vcs/nodes.py'))
185 185
186 186 msgbox = """<div class="commit right-content">%s</div>"""
187 187 response.mustcontain(msgbox % (commit.message, ))
188 188
189 189 assert_response = response.assert_response()
190 190 if commit.branch:
191 191 assert_response.element_contains('.tags.tags-main .branchtag', commit.branch)
192 192 if commit.tags:
193 193 for tag in commit.tags:
194 194 assert_response.element_contains('.tags.tags-main .tagtag', tag)
195 195
196 196 def test_file_source_history(self, backend):
197 197 response = self.app.get(
198 198 url(
199 199 controller='files', action='history',
200 200 repo_name=backend.repo_name,
201 201 revision='tip',
202 202 f_path='vcs/nodes.py'),
203 203 extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
204 204 assert NODE_HISTORY[backend.alias] == json.loads(response.body)
205 205
206 206 def test_file_source_history_svn(self, backend_svn):
207 207 simple_repo = backend_svn['svn-simple-layout']
208 208 response = self.app.get(
209 209 url(
210 210 controller='files', action='history',
211 211 repo_name=simple_repo.repo_name,
212 212 revision='tip',
213 213 f_path='trunk/example.py'),
214 214 extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
215 215
216 216 expected_data = json.loads(
217 217 fixture.load_resource('svn_node_history_branches.json'))
218 218 assert expected_data == response.json
219 219
220 220 def test_file_annotation_history(self, backend):
221 221 response = self.app.get(
222 222 url(
223 223 controller='files', action='history',
224 224 repo_name=backend.repo_name,
225 225 revision='tip',
226 226 f_path='vcs/nodes.py',
227 227 annotate=True),
228 228 extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
229 229 assert NODE_HISTORY[backend.alias] == json.loads(response.body)
230 230
231 231 def test_file_annotation(self, backend):
232 232 response = self.app.get(url(
233 233 controller='files', action='index',
234 234 repo_name=backend.repo_name, revision='tip', f_path='vcs/nodes.py',
235 235 annotate=True))
236 236
237 237 expected_revisions = {
238 238 'hg': 'r356',
239 239 'git': 'r345',
240 240 'svn': 'r208',
241 241 }
242 242 response.mustcontain(expected_revisions[backend.alias])
243 243
244 244 def test_file_authors(self, backend):
245 245 response = self.app.get(url(
246 246 controller='files', action='authors',
247 247 repo_name=backend.repo_name,
248 248 revision='tip',
249 249 f_path='vcs/nodes.py',
250 250 annotate=True))
251 251
252 252 expected_authors = {
253 253 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
254 254 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
255 255 'svn': ('marcin', 'lukasz'),
256 256 }
257 257
258 258 for author in expected_authors[backend.alias]:
259 259 response.mustcontain(author)
260 260
261 261 def test_tree_search_top_level(self, backend, xhr_header):
262 262 commit = backend.repo.get_commit(commit_idx=173)
263 263 response = self.app.get(
264 264 url('files_nodelist_home', repo_name=backend.repo_name,
265 265 revision=commit.raw_id, f_path='/'),
266 266 extra_environ=xhr_header)
267 267 assert 'nodes' in response.json
268 268 assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']
269 269
270 270 def test_tree_search_at_path(self, backend, xhr_header):
271 271 commit = backend.repo.get_commit(commit_idx=173)
272 272 response = self.app.get(
273 273 url('files_nodelist_home', repo_name=backend.repo_name,
274 274 revision=commit.raw_id, f_path='/docs'),
275 275 extra_environ=xhr_header)
276 276 assert 'nodes' in response.json
277 277 nodes = response.json['nodes']
278 278 assert {'name': 'docs/api', 'type': 'dir'} in nodes
279 279 assert {'name': 'docs/index.rst', 'type': 'file'} in nodes
280 280
281 281 def test_tree_search_at_path_missing_xhr(self, backend):
282 282 self.app.get(
283 283 url('files_nodelist_home', repo_name=backend.repo_name,
284 284 revision='tip', f_path=''), status=400)
285 285
286 286 def test_tree_view_list(self, backend, xhr_header):
287 287 commit = backend.repo.get_commit(commit_idx=173)
288 288 response = self.app.get(
289 289 url('files_nodelist_home', repo_name=backend.repo_name,
290 290 f_path='/', revision=commit.raw_id),
291 291 extra_environ=xhr_header,
292 292 )
293 293 response.mustcontain("vcs/web/simplevcs/views/repository.py")
294 294
295 295 def test_tree_view_list_at_path(self, backend, xhr_header):
296 296 commit = backend.repo.get_commit(commit_idx=173)
297 297 response = self.app.get(
298 298 url('files_nodelist_home', repo_name=backend.repo_name,
299 299 f_path='/docs', revision=commit.raw_id),
300 300 extra_environ=xhr_header,
301 301 )
302 302 response.mustcontain("docs/index.rst")
303 303
304 304 def test_tree_view_list_missing_xhr(self, backend):
305 305 self.app.get(
306 306 url('files_nodelist_home', repo_name=backend.repo_name,
307 307 f_path='/', revision='tip'), status=400)
308 308
309 309 def test_nodetree_full_success(self, backend, xhr_header):
310 310 commit = backend.repo.get_commit(commit_idx=173)
311 311 response = self.app.get(
312 312 url('files_nodetree_full', repo_name=backend.repo_name,
313 313 f_path='/', commit_id=commit.raw_id),
314 314 extra_environ=xhr_header)
315 315
316 316 assert_response = response.assert_response()
317 317
318 318 for attr in ['data-commit-id', 'data-date', 'data-author']:
319 319 elements = assert_response.get_elements('[{}]'.format(attr))
320 320 assert len(elements) > 1
321 321
322 322 for element in elements:
323 323 assert element.get(attr)
324 324
325 325 def test_nodetree_full_if_file(self, backend, xhr_header):
326 326 commit = backend.repo.get_commit(commit_idx=173)
327 327 response = self.app.get(
328 328 url('files_nodetree_full', repo_name=backend.repo_name,
329 329 f_path='README.rst', commit_id=commit.raw_id),
330 330 extra_environ=xhr_header)
331 331 assert response.body == ''
332 332
333 333 def test_tree_metadata_list_missing_xhr(self, backend):
334 334 self.app.get(
335 335 url('files_nodetree_full', repo_name=backend.repo_name,
336 336 f_path='/', commit_id='tip'), status=400)
337 337
338 338 def test_access_empty_repo_redirect_to_summary_with_alert_write_perms(
339 339 self, app, backend_stub, autologin_regular_user, user_regular,
340 340 user_util):
341 341 repo = backend_stub.create_repo()
342 342 user_util.grant_user_permission_to_repo(
343 343 repo, user_regular, 'repository.write')
344 344 response = self.app.get(url(
345 345 controller='files', action='index',
346 346 repo_name=repo.repo_name, revision='tip', f_path='/'))
347 347 assert_session_flash(
348 348 response,
349 349 'There are no files yet. <a class="alert-link" '
350 350 'href="/%s/add/0/#edit">Click here to add a new file.</a>'
351 351 % (repo.repo_name))
352 352
353 353 def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms(
354 354 self, backend_stub, user_util):
355 355 repo = backend_stub.create_repo()
356 356 repo_file_url = url(
357 357 'files_add_home',
358 358 repo_name=repo.repo_name,
359 359 revision=0, f_path='', anchor='edit')
360 360 response = self.app.get(url(
361 361 controller='files', action='index',
362 362 repo_name=repo.repo_name, revision='tip', f_path='/'))
363 363 assert_not_in_session_flash(response, repo_file_url)
364 364
365 365
366 366 # TODO: johbo: Think about a better place for these tests. Either controller
367 367 # specific unit tests or we move down the whole logic further towards the vcs
368 368 # layer
369 369 class TestAdjustFilePathForSvn(object):
370 370 """SVN specific adjustments of node history in FileController."""
371 371
372 372 def test_returns_path_relative_to_matched_reference(self):
373 373 repo = self._repo(branches=['trunk'])
374 374 self.assert_file_adjustment('trunk/file', 'file', repo)
375 375
376 376 def test_does_not_modify_file_if_no_reference_matches(self):
377 377 repo = self._repo(branches=['trunk'])
378 378 self.assert_file_adjustment('notes/file', 'notes/file', repo)
379 379
380 380 def test_does_not_adjust_partial_directory_names(self):
381 381 repo = self._repo(branches=['trun'])
382 382 self.assert_file_adjustment('trunk/file', 'trunk/file', repo)
383 383
384 384 def test_is_robust_to_patterns_which_prefix_other_patterns(self):
385 385 repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old'])
386 386 self.assert_file_adjustment('trunk/new/file', 'file', repo)
387 387
388 388 def assert_file_adjustment(self, f_path, expected, repo):
389 389 controller = FilesController()
390 390 result = controller._adjust_file_path_for_svn(f_path, repo)
391 391 assert result == expected
392 392
393 393 def _repo(self, branches=None):
394 394 repo = mock.Mock()
395 395 repo.branches = OrderedDict((name, '0') for name in branches or [])
396 396 repo.tags = {}
397 397 return repo
398 398
399 399
400 400 @pytest.mark.usefixtures("app")
401 401 class TestRepositoryArchival(object):
402 402
403 403 def test_archival(self, backend):
404 404 backend.enable_downloads()
405 405 commit = backend.repo.get_commit(commit_idx=173)
406 406 for archive, info in settings.ARCHIVE_SPECS.items():
407 407 mime_type, arch_ext = info
408 408 short = commit.short_id + arch_ext
409 409 fname = commit.raw_id + arch_ext
410 410 filename = '%s-%s' % (backend.repo_name, short)
411 411 response = self.app.get(url(controller='files',
412 412 action='archivefile',
413 413 repo_name=backend.repo_name,
414 414 fname=fname))
415 415
416 416 assert response.status == '200 OK'
417 headers = {
418 'Pragma': 'no-cache',
419 'Cache-Control': 'no-cache',
420 'Content-Disposition': 'attachment; filename=%s' % filename,
421 'Content-Type': '%s; charset=utf-8' % mime_type,
422 }
417 headers = [
418 ('Pragma', 'no-cache'),
419 ('Cache-Control', 'no-cache'),
420 ('Content-Disposition', 'attachment; filename=%s' % filename),
421 ('Content-Type', '%s' % mime_type),
422 ]
423 423 if 'Set-Cookie' in response.response.headers:
424 424 del response.response.headers['Set-Cookie']
425 assert response.response.headers == headers
425 assert response.response.headers.items() == headers
426 426
427 427 def test_archival_wrong_ext(self, backend):
428 428 backend.enable_downloads()
429 429 commit = backend.repo.get_commit(commit_idx=173)
430 430 for arch_ext in ['tar', 'rar', 'x', '..ax', '.zipz']:
431 431 fname = commit.raw_id + arch_ext
432 432
433 433 response = self.app.get(url(controller='files',
434 434 action='archivefile',
435 435 repo_name=backend.repo_name,
436 436 fname=fname))
437 437 response.mustcontain('Unknown archive type')
438 438
439 439 def test_archival_wrong_commit_id(self, backend):
440 440 backend.enable_downloads()
441 441 for commit_id in ['00x000000', 'tar', 'wrong', '@##$@$42413232',
442 442 '232dffcd']:
443 443 fname = '%s.zip' % commit_id
444 444
445 445 response = self.app.get(url(controller='files',
446 446 action='archivefile',
447 447 repo_name=backend.repo_name,
448 448 fname=fname))
449 449 response.mustcontain('Unknown revision')
450 450
451 451
452 452 @pytest.mark.usefixtures("app", "autologin_user")
453 453 class TestRawFileHandling(object):
454 454
455 455 def test_raw_file_ok(self, backend):
456 456 commit = backend.repo.get_commit(commit_idx=173)
457 457 response = self.app.get(url(controller='files', action='rawfile',
458 458 repo_name=backend.repo_name,
459 459 revision=commit.raw_id,
460 460 f_path='vcs/nodes.py'))
461 461
462 462 assert response.content_disposition == "attachment; filename=nodes.py"
463 463 assert response.content_type == "text/x-python"
464 464
465 465 def test_raw_file_wrong_cs(self, backend):
466 466 commit_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'
467 467 f_path = 'vcs/nodes.py'
468 468
469 469 response = self.app.get(url(controller='files', action='rawfile',
470 470 repo_name=backend.repo_name,
471 471 revision=commit_id,
472 472 f_path=f_path), status=404)
473 473
474 474 msg = """No such commit exists for this repository"""
475 475 response.mustcontain(msg)
476 476
477 477 def test_raw_file_wrong_f_path(self, backend):
478 478 commit = backend.repo.get_commit(commit_idx=173)
479 479 f_path = 'vcs/ERRORnodes.py'
480 480 response = self.app.get(url(controller='files', action='rawfile',
481 481 repo_name=backend.repo_name,
482 482 revision=commit.raw_id,
483 483 f_path=f_path), status=404)
484 484
485 485 msg = (
486 486 "There is no file nor directory at the given path: "
487 487 "`%s` at commit %s" % (f_path, commit.short_id))
488 488 response.mustcontain(msg)
489 489
490 490 def test_raw_ok(self, backend):
491 491 commit = backend.repo.get_commit(commit_idx=173)
492 492 response = self.app.get(url(controller='files', action='raw',
493 493 repo_name=backend.repo_name,
494 494 revision=commit.raw_id,
495 495 f_path='vcs/nodes.py'))
496 496
497 497 assert response.content_type == "text/plain"
498 498
499 499 def test_raw_wrong_cs(self, backend):
500 500 commit_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'
501 501 f_path = 'vcs/nodes.py'
502 502
503 503 response = self.app.get(url(controller='files', action='raw',
504 504 repo_name=backend.repo_name,
505 505 revision=commit_id,
506 506 f_path=f_path), status=404)
507 507
508 508 msg = """No such commit exists for this repository"""
509 509 response.mustcontain(msg)
510 510
511 511 def test_raw_wrong_f_path(self, backend):
512 512 commit = backend.repo.get_commit(commit_idx=173)
513 513 f_path = 'vcs/ERRORnodes.py'
514 514 response = self.app.get(url(controller='files', action='raw',
515 515 repo_name=backend.repo_name,
516 516 revision=commit.raw_id,
517 517 f_path=f_path), status=404)
518 518 msg = (
519 519 "There is no file nor directory at the given path: "
520 520 "`%s` at commit %s" % (f_path, commit.short_id))
521 521 response.mustcontain(msg)
522 522
523 523 def test_raw_svg_should_not_be_rendered(self, backend):
524 524 backend.create_repo()
525 525 backend.ensure_file("xss.svg")
526 526 response = self.app.get(url(controller='files', action='raw',
527 527 repo_name=backend.repo_name,
528 528 revision='tip',
529 529 f_path='xss.svg'))
530 530
531 531 # If the content type is image/svg+xml then it allows to render HTML
532 532 # and malicious SVG.
533 533 assert response.content_type == "text/plain"
534 534
535 535
536 536 @pytest.mark.usefixtures("app")
537 537 class TestFilesDiff:
538 538
539 539 @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
540 540 def test_file_full_diff(self, backend, diff):
541 541 commit1 = backend.repo.get_commit(commit_idx=-1)
542 542 commit2 = backend.repo.get_commit(commit_idx=-2)
543 543
544 544 response = self.app.get(
545 545 url(
546 546 controller='files',
547 547 action='diff',
548 548 repo_name=backend.repo_name,
549 549 f_path='README'),
550 550 params={
551 551 'diff1': commit2.raw_id,
552 552 'diff2': commit1.raw_id,
553 553 'fulldiff': '1',
554 554 'diff': diff,
555 555 })
556 556
557 557 if diff == 'diff':
558 558 # use redirect since this is OLD view redirecting to compare page
559 559 response = response.follow()
560 560
561 561 # It's a symlink to README.rst
562 562 response.mustcontain('README.rst')
563 563 response.mustcontain('No newline at end of file')
564 564
565 565 def test_file_binary_diff(self, backend):
566 566 commits = [
567 567 {'message': 'First commit'},
568 568 {'message': 'Commit with binary',
569 569 'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]},
570 570 ]
571 571 repo = backend.create_repo(commits=commits)
572 572
573 573 response = self.app.get(
574 574 url(
575 575 controller='files',
576 576 action='diff',
577 577 repo_name=backend.repo_name,
578 578 f_path='file.bin'),
579 579 params={
580 580 'diff1': repo.get_commit(commit_idx=0).raw_id,
581 581 'diff2': repo.get_commit(commit_idx=1).raw_id,
582 582 'fulldiff': '1',
583 583 'diff': 'diff',
584 584 })
585 585 # use redirect since this is OLD view redirecting to compare page
586 586 response = response.follow()
587 587 response.mustcontain('Expand 1 commit')
588 588 response.mustcontain('1 file changed: 0 inserted, 0 deleted')
589 589
590 590 if backend.alias == 'svn':
591 591 response.mustcontain('new file 10644')
592 592 # TODO(marcink): SVN doesn't yet detect binary changes
593 593 else:
594 594 response.mustcontain('new file 100644')
595 595 response.mustcontain('binary diff hidden')
596 596
597 597 def test_diff_2way(self, backend):
598 598 commit1 = backend.repo.get_commit(commit_idx=-1)
599 599 commit2 = backend.repo.get_commit(commit_idx=-2)
600 600 response = self.app.get(
601 601 url(
602 602 controller='files',
603 603 action='diff_2way',
604 604 repo_name=backend.repo_name,
605 605 f_path='README'),
606 606 params={
607 607 'diff1': commit2.raw_id,
608 608 'diff2': commit1.raw_id,
609 609 })
610 610 # use redirect since this is OLD view redirecting to compare page
611 611 response = response.follow()
612 612
613 613 # It's a symlink to README.rst
614 614 response.mustcontain('README.rst')
615 615 response.mustcontain('No newline at end of file')
616 616
617 617 def test_requires_one_commit_id(self, backend, autologin_user):
618 618 response = self.app.get(
619 619 url(
620 620 controller='files',
621 621 action='diff',
622 622 repo_name=backend.repo_name,
623 623 f_path='README.rst'),
624 624 status=400)
625 625 response.mustcontain(
626 626 'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')
627 627
628 628 def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
629 629 repo = vcsbackend.repo
630 630 response = self.app.get(
631 631 url(
632 632 controller='files',
633 633 action='diff',
634 634 repo_name=repo.name,
635 635 f_path='does-not-exist-in-any-commit',
636 636 diff1=repo[0].raw_id,
637 637 diff2=repo[1].raw_id),)
638 638
639 639 response = response.follow()
640 640 response.mustcontain('No files')
641 641
642 642 def test_returns_redirect_if_file_not_changed(self, backend):
643 643 commit = backend.repo.get_commit(commit_idx=-1)
644 644 f_path = 'README'
645 645 response = self.app.get(
646 646 url(
647 647 controller='files',
648 648 action='diff_2way',
649 649 repo_name=backend.repo_name,
650 650 f_path=f_path,
651 651 diff1=commit.raw_id,
652 652 diff2=commit.raw_id,
653 653 ),
654 654 )
655 655 response = response.follow()
656 656 response.mustcontain('No files')
657 657 response.mustcontain('No commits in this compare')
658 658
659 659 def test_supports_diff_to_different_path_svn(self, backend_svn):
660 660 #TODO: check this case
661 661 return
662 662
663 663 repo = backend_svn['svn-simple-layout'].scm_instance()
664 664 commit_id_1 = '24'
665 665 commit_id_2 = '26'
666 666
667 667
668 668 print( url(
669 669 controller='files',
670 670 action='diff',
671 671 repo_name=repo.name,
672 672 f_path='trunk/example.py',
673 673 diff1='tags/v0.2/example.py@' + commit_id_1,
674 674 diff2=commit_id_2))
675 675
676 676 response = self.app.get(
677 677 url(
678 678 controller='files',
679 679 action='diff',
680 680 repo_name=repo.name,
681 681 f_path='trunk/example.py',
682 682 diff1='tags/v0.2/example.py@' + commit_id_1,
683 683 diff2=commit_id_2))
684 684
685 685 response = response.follow()
686 686 response.mustcontain(
687 687 # diff contains this
688 688 "Will print out a useful message on invocation.")
689 689
690 690 # Note: Expecting that we indicate the user what's being compared
691 691 response.mustcontain("trunk/example.py")
692 692 response.mustcontain("tags/v0.2/example.py")
693 693
694 694 def test_show_rev_redirects_to_svn_path(self, backend_svn):
695 695 #TODO: check this case
696 696 return
697 697
698 698 repo = backend_svn['svn-simple-layout'].scm_instance()
699 699 commit_id = repo[-1].raw_id
700 700 response = self.app.get(
701 701 url(
702 702 controller='files',
703 703 action='diff',
704 704 repo_name=repo.name,
705 705 f_path='trunk/example.py',
706 706 diff1='branches/argparse/example.py@' + commit_id,
707 707 diff2=commit_id),
708 708 params={'show_rev': 'Show at Revision'},
709 709 status=302)
710 710 assert response.headers['Location'].endswith(
711 711 'svn-svn-simple-layout/files/26/branches/argparse/example.py')
712 712
713 713 def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn):
714 714 #TODO: check this case
715 715 return
716 716
717 717 repo = backend_svn['svn-simple-layout'].scm_instance()
718 718 commit_id = repo[-1].raw_id
719 719 response = self.app.get(
720 720 url(
721 721 controller='files',
722 722 action='diff',
723 723 repo_name=repo.name,
724 724 f_path='trunk/example.py',
725 725 diff1='branches/argparse/example.py@' + commit_id,
726 726 diff2=commit_id),
727 727 params={
728 728 'show_rev': 'Show at Revision',
729 729 'annotate': 'true',
730 730 },
731 731 status=302)
732 732 assert response.headers['Location'].endswith(
733 733 'svn-svn-simple-layout/annotate/26/branches/argparse/example.py')
734 734
735 735
736 736 @pytest.mark.usefixtures("app", "autologin_user")
737 737 class TestChangingFiles:
738 738
739 739 def test_add_file_view(self, backend):
740 740 self.app.get(url(
741 741 'files_add_home',
742 742 repo_name=backend.repo_name,
743 743 revision='tip', f_path='/'))
744 744
745 745 @pytest.mark.xfail_backends("svn", reason="Depends on online editing")
746 746 def test_add_file_into_repo_missing_content(self, backend, csrf_token):
747 747 repo = backend.create_repo()
748 748 filename = 'init.py'
749 749 response = self.app.post(
750 750 url(
751 751 'files_add',
752 752 repo_name=repo.repo_name,
753 753 revision='tip', f_path='/'),
754 754 params={
755 755 'content': "",
756 756 'filename': filename,
757 757 'location': "",
758 758 'csrf_token': csrf_token,
759 759 },
760 760 status=302)
761 761 assert_session_flash(response,
762 762 'Successfully committed new file `{}`'.format(os.path.join(filename)))
763 763
764 764 def test_add_file_into_repo_missing_filename(self, backend, csrf_token):
765 765 response = self.app.post(
766 766 url(
767 767 'files_add',
768 768 repo_name=backend.repo_name,
769 769 revision='tip', f_path='/'),
770 770 params={
771 771 'content': "foo",
772 772 'csrf_token': csrf_token,
773 773 },
774 774 status=302)
775 775
776 776 assert_session_flash(response, 'No filename')
777 777
778 778 def test_add_file_into_repo_errors_and_no_commits(
779 779 self, backend, csrf_token):
780 780 repo = backend.create_repo()
781 781 # Create a file with no filename, it will display an error but
782 782 # the repo has no commits yet
783 783 response = self.app.post(
784 784 url(
785 785 'files_add',
786 786 repo_name=repo.repo_name,
787 787 revision='tip', f_path='/'),
788 788 params={
789 789 'content': "foo",
790 790 'csrf_token': csrf_token,
791 791 },
792 792 status=302)
793 793
794 794 assert_session_flash(response, 'No filename')
795 795
796 796 # Not allowed, redirect to the summary
797 797 redirected = response.follow()
798 798 summary_url = h.route_path('repo_summary', repo_name=repo.repo_name)
799 799
800 800 # As there are no commits, displays the summary page with the error of
801 801 # creating a file with no filename
802 802
803 803 assert redirected.request.path == summary_url
804 804
805 805 @pytest.mark.parametrize("location, filename", [
806 806 ('/abs', 'foo'),
807 807 ('../rel', 'foo'),
808 808 ('file/../foo', 'foo'),
809 809 ])
810 810 def test_add_file_into_repo_bad_filenames(
811 811 self, location, filename, backend, csrf_token):
812 812 response = self.app.post(
813 813 url(
814 814 'files_add',
815 815 repo_name=backend.repo_name,
816 816 revision='tip', f_path='/'),
817 817 params={
818 818 'content': "foo",
819 819 'filename': filename,
820 820 'location': location,
821 821 'csrf_token': csrf_token,
822 822 },
823 823 status=302)
824 824
825 825 assert_session_flash(
826 826 response,
827 827 'The location specified must be a relative path and must not '
828 828 'contain .. in the path')
829 829
830 830 @pytest.mark.parametrize("cnt, location, filename", [
831 831 (1, '', 'foo.txt'),
832 832 (2, 'dir', 'foo.rst'),
833 833 (3, 'rel/dir', 'foo.bar'),
834 834 ])
835 835 def test_add_file_into_repo(self, cnt, location, filename, backend,
836 836 csrf_token):
837 837 repo = backend.create_repo()
838 838 response = self.app.post(
839 839 url(
840 840 'files_add',
841 841 repo_name=repo.repo_name,
842 842 revision='tip', f_path='/'),
843 843 params={
844 844 'content': "foo",
845 845 'filename': filename,
846 846 'location': location,
847 847 'csrf_token': csrf_token,
848 848 },
849 849 status=302)
850 850 assert_session_flash(response,
851 851 'Successfully committed new file `{}`'.format(
852 852 os.path.join(location, filename)))
853 853
854 854 def test_edit_file_view(self, backend):
855 855 response = self.app.get(
856 856 url(
857 857 'files_edit_home',
858 858 repo_name=backend.repo_name,
859 859 revision=backend.default_head_id,
860 860 f_path='vcs/nodes.py'),
861 861 status=200)
862 862 response.mustcontain("Module holding everything related to vcs nodes.")
863 863
864 864 def test_edit_file_view_not_on_branch(self, backend):
865 865 repo = backend.create_repo()
866 866 backend.ensure_file("vcs/nodes.py")
867 867
868 868 response = self.app.get(
869 869 url(
870 870 'files_edit_home',
871 871 repo_name=repo.repo_name,
872 872 revision='tip', f_path='vcs/nodes.py'),
873 873 status=302)
874 874 assert_session_flash(
875 875 response,
876 876 'You can only edit files with revision being a valid branch')
877 877
878 878 def test_edit_file_view_commit_changes(self, backend, csrf_token):
879 879 repo = backend.create_repo()
880 880 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
881 881
882 882 response = self.app.post(
883 883 url(
884 884 'files_edit',
885 885 repo_name=repo.repo_name,
886 886 revision=backend.default_head_id,
887 887 f_path='vcs/nodes.py'),
888 888 params={
889 889 'content': "print 'hello world'",
890 890 'message': 'I committed',
891 891 'filename': "vcs/nodes.py",
892 892 'csrf_token': csrf_token,
893 893 },
894 894 status=302)
895 895 assert_session_flash(
896 896 response, 'Successfully committed changes to file `vcs/nodes.py`')
897 897 tip = repo.get_commit(commit_idx=-1)
898 898 assert tip.message == 'I committed'
899 899
900 900 def test_edit_file_view_commit_changes_default_message(self, backend,
901 901 csrf_token):
902 902 repo = backend.create_repo()
903 903 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
904 904
905 905 commit_id = (
906 906 backend.default_branch_name or
907 907 backend.repo.scm_instance().commit_ids[-1])
908 908
909 909 response = self.app.post(
910 910 url(
911 911 'files_edit',
912 912 repo_name=repo.repo_name,
913 913 revision=commit_id,
914 914 f_path='vcs/nodes.py'),
915 915 params={
916 916 'content': "print 'hello world'",
917 917 'message': '',
918 918 'filename': "vcs/nodes.py",
919 919 'csrf_token': csrf_token,
920 920 },
921 921 status=302)
922 922 assert_session_flash(
923 923 response, 'Successfully committed changes to file `vcs/nodes.py`')
924 924 tip = repo.get_commit(commit_idx=-1)
925 925 assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise'
926 926
927 927 def test_delete_file_view(self, backend):
928 928 self.app.get(url(
929 929 'files_delete_home',
930 930 repo_name=backend.repo_name,
931 931 revision='tip', f_path='vcs/nodes.py'))
932 932
933 933 def test_delete_file_view_not_on_branch(self, backend):
934 934 repo = backend.create_repo()
935 935 backend.ensure_file('vcs/nodes.py')
936 936
937 937 response = self.app.get(
938 938 url(
939 939 'files_delete_home',
940 940 repo_name=repo.repo_name,
941 941 revision='tip', f_path='vcs/nodes.py'),
942 942 status=302)
943 943 assert_session_flash(
944 944 response,
945 945 'You can only delete files with revision being a valid branch')
946 946
947 947 def test_delete_file_view_commit_changes(self, backend, csrf_token):
948 948 repo = backend.create_repo()
949 949 backend.ensure_file("vcs/nodes.py")
950 950
951 951 response = self.app.post(
952 952 url(
953 953 'files_delete_home',
954 954 repo_name=repo.repo_name,
955 955 revision=backend.default_head_id,
956 956 f_path='vcs/nodes.py'),
957 957 params={
958 958 'message': 'i commited',
959 959 'csrf_token': csrf_token,
960 960 },
961 961 status=302)
962 962 assert_session_flash(
963 963 response, 'Successfully deleted file `vcs/nodes.py`')
964 964
965 965
966 966 def assert_files_in_response(response, files, params):
967 967 template = (
968 968 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
969 969 _assert_items_in_response(response, files, template, params)
970 970
971 971
972 972 def assert_dirs_in_response(response, dirs, params):
973 973 template = (
974 974 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
975 975 _assert_items_in_response(response, dirs, template, params)
976 976
977 977
978 978 def _assert_items_in_response(response, items, template, params):
979 979 for item in items:
980 980 item_params = {'name': item}
981 981 item_params.update(params)
982 982 response.mustcontain(template % item_params)
983 983
984 984
985 985 def assert_timeago_in_response(response, items, params):
986 986 for item in items:
987 987 response.mustcontain(h.age_component(params['date']))
@@ -1,452 +1,455 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import base64
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.tests.utils import CustomTestApp
27 27
28 28 from rhodecode.lib.caching_query import FromCache
29 29 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
30 30 from rhodecode.lib.middleware import simplevcs
31 31 from rhodecode.lib.middleware.https_fixup import HttpsFixup
32 32 from rhodecode.lib.middleware.utils import scm_app_http
33 33 from rhodecode.model.db import User, _hash_key
34 34 from rhodecode.model.meta import Session
35 35 from rhodecode.tests import (
36 36 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
37 37 from rhodecode.tests.lib.middleware import mock_scm_app
38 38
39 39
40 40 class StubVCSController(simplevcs.SimpleVCS):
41 41
42 42 SCM = 'hg'
43 43 stub_response_body = tuple()
44 44
45 45 def __init__(self, *args, **kwargs):
46 46 super(StubVCSController, self).__init__(*args, **kwargs)
47 47 self._action = 'pull'
48 48 self._name = HG_REPO
49 49 self.set_repo_names(None)
50 50
51 51 def _get_repository_name(self, environ):
52 52 return self._name
53 53
54 54 def _get_action(self, environ):
55 55 return self._action
56 56
57 57 def _create_wsgi_app(self, repo_path, repo_name, config):
58 58 def fake_app(environ, start_response):
59 start_response('200 OK', [])
59 headers = [
60 ('Http-Accept', 'application/mercurial')
61 ]
62 start_response('200 OK', headers)
60 63 return self.stub_response_body
61 64 return fake_app
62 65
63 66 def _create_config(self, extras, repo_name):
64 67 return None
65 68
66 69
67 70 @pytest.fixture
68 71 def vcscontroller(pylonsapp, config_stub):
69 72 config_stub.testing_securitypolicy()
70 73 config_stub.include('rhodecode.authentication')
71 74
72 75 #set_anonymous_access(True)
73 76 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
74 77 app = HttpsFixup(controller, pylonsapp.config)
75 78 app = CustomTestApp(app)
76 79
77 80 _remove_default_user_from_query_cache()
78 81
79 82 # Sanity checks that things are set up correctly
80 83 app.get('/' + HG_REPO, status=200)
81 84
82 85 app.controller = controller
83 86 return app
84 87
85 88
86 89 def _remove_default_user_from_query_cache():
87 90 user = User.get_default_user(cache=True)
88 91 query = Session().query(User).filter(User.username == user.username)
89 92 query = query.options(
90 93 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
91 94 query.invalidate()
92 95 Session().expire(user)
93 96
94 97
95
96
97 98 def test_handles_exceptions_during_permissions_checks(
98 99 vcscontroller, disable_anonymous_user):
99 100 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
100 101 auth_password = base64.encodestring(user_and_pass).strip()
101 102 extra_environ = {
102 103 'AUTH_TYPE': 'Basic',
103 104 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
104 105 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
105 106 }
106 107
107 108 # Verify that things are hooked up correctly
108 109 vcscontroller.get('/', status=200, extra_environ=extra_environ)
109 110
110 111 # Simulate trouble during permission checks
111 112 with mock.patch('rhodecode.model.db.User.get_by_username',
112 113 side_effect=Exception) as get_user:
113 114 # Verify that a correct 500 is returned and check that the expected
114 115 # code path was hit.
115 116 vcscontroller.get('/', status=500, extra_environ=extra_environ)
116 117 assert get_user.called
117 118
118 119
119 120 def test_returns_forbidden_if_no_anonymous_access(
120 121 vcscontroller, disable_anonymous_user):
121 122 vcscontroller.get('/', status=401)
122 123
123 124
124 125 class StubFailVCSController(simplevcs.SimpleVCS):
125 126 def _handle_request(self, environ, start_response):
126 127 raise Exception("BOOM")
127 128
128 129
129 130 @pytest.fixture(scope='module')
130 131 def fail_controller(pylonsapp):
131 132 controller = StubFailVCSController(pylonsapp, pylonsapp.config, None)
132 133 controller = HttpsFixup(controller, pylonsapp.config)
133 134 controller = CustomTestApp(controller)
134 135 return controller
135 136
136 137
137 138 def test_handles_exceptions_as_internal_server_error(fail_controller):
138 139 fail_controller.get('/', status=500)
139 140
140 141
141 142 def test_provides_traceback_for_appenlight(fail_controller):
142 143 response = fail_controller.get(
143 144 '/', status=500, extra_environ={'appenlight.client': 'fake'})
144 145 assert 'appenlight.__traceback' in response.request.environ
145 146
146 147
147 148 def test_provides_utils_scm_app_as_scm_app_by_default(pylonsapp):
148 149 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
149 150 assert controller.scm_app is scm_app_http
150 151
151 152
152 153 def test_allows_to_override_scm_app_via_config(pylonsapp):
153 154 config = pylonsapp.config.copy()
154 155 config['vcs.scm_app_implementation'] = (
155 156 'rhodecode.tests.lib.middleware.mock_scm_app')
156 157 controller = StubVCSController(pylonsapp, config, None)
157 158 assert controller.scm_app is mock_scm_app
158 159
159 160
160 161 @pytest.mark.parametrize('query_string, expected', [
161 162 ('cmd=stub_command', True),
162 163 ('cmd=listkeys', False),
163 164 ])
164 165 def test_should_check_locking(query_string, expected):
165 166 result = simplevcs._should_check_locking(query_string)
166 167 assert result == expected
167 168
168 169
169 170 class TestShadowRepoRegularExpression(object):
170 171 pr_segment = 'pull-request'
171 172 shadow_segment = 'repository'
172 173
173 174 @pytest.mark.parametrize('url, expected', [
174 175 # repo with/without groups
175 176 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
176 177 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
177 178 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
178 179 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
179 180
180 181 # pull request ID
181 182 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
182 183 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
183 184 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
184 185 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
185 186
186 187 # unicode
187 188 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
188 189 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
189 190
190 191 # trailing/leading slash
191 192 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
192 193 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
193 194 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
194 195
195 196 # misc
196 197 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
197 198 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
198 199 ])
199 200 def test_shadow_repo_regular_expression(self, url, expected):
200 201 from rhodecode.lib.middleware.simplevcs import SimpleVCS
201 202 url = url.format(
202 203 pr_segment=self.pr_segment,
203 204 shadow_segment=self.shadow_segment)
204 205 match_obj = SimpleVCS.shadow_repo_re.match(url)
205 206 assert (match_obj is not None) == expected
206 207
207 208
208 209 @pytest.mark.backends('git', 'hg')
209 210 class TestShadowRepoExposure(object):
210 211
211 212 def test_pull_on_shadow_repo_propagates_to_wsgi_app(self, pylonsapp):
212 213 """
213 214 Check that a pull action to a shadow repo is propagated to the
214 215 underlying wsgi app.
215 216 """
216 217 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
217 218 controller._check_ssl = mock.Mock()
218 219 controller.is_shadow_repo = True
219 220 controller._action = 'pull'
220 221 controller.stub_response_body = 'dummy body value'
221 222 environ_stub = {
222 223 'HTTP_HOST': 'test.example.com',
224 'HTTP_ACCEPT': 'application/mercurial',
223 225 'REQUEST_METHOD': 'GET',
224 226 'wsgi.url_scheme': 'http',
225 227 }
226 228
227 229 response = controller(environ_stub, mock.Mock())
228 230 response_body = ''.join(response)
229 231
230 232 # Assert that we got the response from the wsgi app.
231 233 assert response_body == controller.stub_response_body
232 234
233 235 def test_push_on_shadow_repo_raises(self, pylonsapp):
234 236 """
235 237 Check that a push action to a shadow repo is aborted.
236 238 """
237 239 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
238 240 controller._check_ssl = mock.Mock()
239 241 controller.is_shadow_repo = True
240 242 controller._action = 'push'
241 243 controller.stub_response_body = 'dummy body value'
242 244 environ_stub = {
243 245 'HTTP_HOST': 'test.example.com',
246 'HTTP_ACCEPT': 'application/mercurial',
244 247 'REQUEST_METHOD': 'GET',
245 248 'wsgi.url_scheme': 'http',
246 249 }
247 250
248 251 response = controller(environ_stub, mock.Mock())
249 252 response_body = ''.join(response)
250 253
251 254 assert response_body != controller.stub_response_body
252 255 # Assert that a 406 error is returned.
253 256 assert '406 Not Acceptable' in response_body
254 257
255 258 def test_set_repo_names_no_shadow(self, pylonsapp):
256 259 """
257 260 Check that the set_repo_names method sets all names to the one returned
258 261 by the _get_repository_name method on a request to a non shadow repo.
259 262 """
260 263 environ_stub = {}
261 264 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
262 265 controller._name = 'RepoGroup/MyRepo'
263 266 controller.set_repo_names(environ_stub)
264 267 assert not controller.is_shadow_repo
265 268 assert (controller.url_repo_name ==
266 269 controller.acl_repo_name ==
267 270 controller.vcs_repo_name ==
268 271 controller._get_repository_name(environ_stub))
269 272
270 273 def test_set_repo_names_with_shadow(self, pylonsapp, pr_util, config_stub):
271 274 """
272 275 Check that the set_repo_names method sets correct names on a request
273 276 to a shadow repo.
274 277 """
275 278 from rhodecode.model.pull_request import PullRequestModel
276 279
277 280 pull_request = pr_util.create_pull_request()
278 281 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
279 282 target=pull_request.target_repo.repo_name,
280 283 pr_id=pull_request.pull_request_id,
281 284 pr_segment=TestShadowRepoRegularExpression.pr_segment,
282 285 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
283 286 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
284 287 controller._name = shadow_url
285 288 controller.set_repo_names({})
286 289
287 290 # Get file system path to shadow repo for assertions.
288 291 workspace_id = PullRequestModel()._workspace_id(pull_request)
289 292 target_vcs = pull_request.target_repo.scm_instance()
290 293 vcs_repo_name = target_vcs._get_shadow_repository_path(
291 294 workspace_id)
292 295
293 296 assert controller.vcs_repo_name == vcs_repo_name
294 297 assert controller.url_repo_name == shadow_url
295 298 assert controller.acl_repo_name == pull_request.target_repo.repo_name
296 299 assert controller.is_shadow_repo
297 300
298 301 def test_set_repo_names_with_shadow_but_missing_pr(
299 302 self, pylonsapp, pr_util, config_stub):
300 303 """
301 304 Checks that the set_repo_names method enforces matching target repos
302 305 and pull request IDs.
303 306 """
304 307 pull_request = pr_util.create_pull_request()
305 308 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
306 309 target=pull_request.target_repo.repo_name,
307 310 pr_id=999999999,
308 311 pr_segment=TestShadowRepoRegularExpression.pr_segment,
309 312 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
310 313 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
311 314 controller._name = shadow_url
312 315 controller.set_repo_names({})
313 316
314 317 assert not controller.is_shadow_repo
315 318 assert (controller.url_repo_name ==
316 319 controller.acl_repo_name ==
317 320 controller.vcs_repo_name)
318 321
319 322
320 323 @pytest.mark.usefixtures('db')
321 324 class TestGenerateVcsResponse(object):
322 325
323 326 def test_ensures_that_start_response_is_called_early_enough(self):
324 327 self.call_controller_with_response_body(iter(['a', 'b']))
325 328 assert self.start_response.called
326 329
327 330 def test_invalidates_cache_after_body_is_consumed(self):
328 331 result = self.call_controller_with_response_body(iter(['a', 'b']))
329 332 assert not self.was_cache_invalidated()
330 333 # Consume the result
331 334 list(result)
332 335 assert self.was_cache_invalidated()
333 336
334 337 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
335 338 def test_handles_locking_exception(self, http_locked_rc):
336 339 result = self.call_controller_with_response_body(
337 340 self.raise_result_iter(vcs_kind='repo_locked'))
338 341 assert not http_locked_rc.called
339 342 # Consume the result
340 343 list(result)
341 344 assert http_locked_rc.called
342 345
343 346 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPRequirementError')
344 347 def test_handles_requirement_exception(self, http_requirement):
345 348 result = self.call_controller_with_response_body(
346 349 self.raise_result_iter(vcs_kind='requirement'))
347 350 assert not http_requirement.called
348 351 # Consume the result
349 352 list(result)
350 353 assert http_requirement.called
351 354
352 355 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
353 356 def test_handles_locking_exception_in_app_call(self, http_locked_rc):
354 357 app_factory_patcher = mock.patch.object(
355 358 StubVCSController, '_create_wsgi_app')
356 359 with app_factory_patcher as app_factory:
357 360 app_factory().side_effect = self.vcs_exception()
358 361 result = self.call_controller_with_response_body(['a'])
359 362 list(result)
360 363 assert http_locked_rc.called
361 364
362 365 def test_raises_unknown_exceptions(self):
363 366 result = self.call_controller_with_response_body(
364 367 self.raise_result_iter(vcs_kind='unknown'))
365 368 with pytest.raises(Exception):
366 369 list(result)
367 370
368 371 def test_prepare_callback_daemon_is_called(self):
369 372 def side_effect(extras):
370 373 return DummyHooksCallbackDaemon(), extras
371 374
372 375 prepare_patcher = mock.patch.object(
373 376 StubVCSController, '_prepare_callback_daemon')
374 377 with prepare_patcher as prepare_mock:
375 378 prepare_mock.side_effect = side_effect
376 379 self.call_controller_with_response_body(iter(['a', 'b']))
377 380 assert prepare_mock.called
378 381 assert prepare_mock.call_count == 1
379 382
380 383 def call_controller_with_response_body(self, response_body):
381 384 settings = {
382 385 'base_path': 'fake_base_path',
383 386 'vcs.hooks.protocol': 'http',
384 387 'vcs.hooks.direct_calls': False,
385 388 }
386 389 controller = StubVCSController(None, settings, None)
387 390 controller._invalidate_cache = mock.Mock()
388 391 controller.stub_response_body = response_body
389 392 self.start_response = mock.Mock()
390 393 result = controller._generate_vcs_response(
391 394 environ={}, start_response=self.start_response,
392 395 repo_path='fake_repo_path',
393 396 extras={}, action='push')
394 397 self.controller = controller
395 398 return result
396 399
397 400 def raise_result_iter(self, vcs_kind='repo_locked'):
398 401 """
399 402 Simulates an exception due to a vcs raised exception if kind vcs_kind
400 403 """
401 404 raise self.vcs_exception(vcs_kind=vcs_kind)
402 405 yield "never_reached"
403 406
404 407 def vcs_exception(self, vcs_kind='repo_locked'):
405 408 locked_exception = Exception('TEST_MESSAGE')
406 409 locked_exception._vcs_kind = vcs_kind
407 410 return locked_exception
408 411
409 412 def was_cache_invalidated(self):
410 413 return self.controller._invalidate_cache.called
411 414
412 415
413 416 class TestInitializeGenerator(object):
414 417
415 418 def test_drains_first_element(self):
416 419 gen = self.factory(['__init__', 1, 2])
417 420 result = list(gen)
418 421 assert result == [1, 2]
419 422
420 423 @pytest.mark.parametrize('values', [
421 424 [],
422 425 [1, 2],
423 426 ])
424 427 def test_raises_value_error(self, values):
425 428 with pytest.raises(ValueError):
426 429 self.factory(values)
427 430
428 431 @simplevcs.initialize_generator
429 432 def factory(self, iterable):
430 433 for elem in iterable:
431 434 yield elem
432 435
433 436
434 437 class TestPrepareHooksDaemon(object):
435 438 def test_calls_imported_prepare_callback_daemon(self, app_settings):
436 439 expected_extras = {'extra1': 'value1'}
437 440 daemon = DummyHooksCallbackDaemon()
438 441
439 442 controller = StubVCSController(None, app_settings, None)
440 443 prepare_patcher = mock.patch.object(
441 444 simplevcs, 'prepare_callback_daemon',
442 445 return_value=(daemon, expected_extras))
443 446 with prepare_patcher as prepare_mock:
444 447 callback_daemon, extras = controller._prepare_callback_daemon(
445 448 expected_extras.copy())
446 449 prepare_mock.assert_called_once_with(
447 450 expected_extras,
448 451 protocol=app_settings['vcs.hooks.protocol'],
449 452 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
450 453
451 454 assert callback_daemon == daemon
452 455 assert extras == extras
@@ -1,423 +1,424 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import threading
22 22 import time
23 23 import logging
24 24 import os.path
25 25 import subprocess32
26 26 import tempfile
27 27 import urllib2
28 28 from lxml.html import fromstring, tostring
29 29 from lxml.cssselect import CSSSelector
30 30 from urlparse import urlparse, parse_qsl
31 31 from urllib import unquote_plus
32 import webob
32 33
33 from webtest.app import (
34 Request, TestResponse, TestApp, print_stderr, string_types)
34 from webtest.app import TestResponse, TestApp, string_types
35 from webtest.compat import print_stderr
35 36
36 37 import pytest
37 38 import rc_testdata
38 39
39 40 from rhodecode.model.db import User, Repository
40 41 from rhodecode.model.meta import Session
41 42 from rhodecode.model.scm import ScmModel
42 43 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
43 44 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 45
45 46
46 47 log = logging.getLogger(__name__)
47 48
48 49
49 50 class CustomTestResponse(TestResponse):
50 51 def _save_output(self, out):
51 52 f = tempfile.NamedTemporaryFile(
52 53 delete=False, prefix='rc-test-', suffix='.html')
53 54 f.write(out)
54 55 return f.name
55 56
56 57 def mustcontain(self, *strings, **kw):
57 58 """
58 59 Assert that the response contains all of the strings passed
59 60 in as arguments.
60 61
61 62 Equivalent to::
62 63
63 64 assert string in res
64 65 """
65 66 if 'no' in kw:
66 67 no = kw['no']
67 68 del kw['no']
68 69 if isinstance(no, string_types):
69 70 no = [no]
70 71 else:
71 72 no = []
72 73 if kw:
73 74 raise TypeError(
74 75 "The only keyword argument allowed is 'no'")
75 76
76 77 f = self._save_output(str(self))
77 78
78 79 for s in strings:
79 80 if not s in self:
80 81 print_stderr("Actual response (no %r):" % s)
81 82 print_stderr(str(self))
82 83 raise IndexError(
83 84 "Body does not contain string %r, output saved as %s" % (
84 85 s, f))
85 86
86 87 for no_s in no:
87 88 if no_s in self:
88 89 print_stderr("Actual response (has %r)" % no_s)
89 90 print_stderr(str(self))
90 91 raise IndexError(
91 92 "Body contains bad string %r, output saved as %s" % (
92 93 no_s, f))
93 94
94 95 def assert_response(self):
95 96 return AssertResponse(self)
96 97
97 98 def get_session_from_response(self):
98 99 """
99 100 This returns the session from a response object. Pylons has some magic
100 101 to make the session available as `response.session`. But pyramid
101 102 doesn't expose it.
102 103 """
103 104 return self.request.environ['beaker.session']
104 105
105 106
106 class TestRequest(Request):
107 class TestRequest(webob.BaseRequest):
107 108
108 109 # for py.test
109 110 disabled = True
110 111 ResponseClass = CustomTestResponse
111 112
112 113
113 114 class CustomTestApp(TestApp):
114 115 """
115 116 Custom app to make mustcontain more usefull
116 117 """
117 118 RequestClass = TestRequest
118 119
119 120
120 121 def set_anonymous_access(enabled):
121 122 """(Dis)allows anonymous access depending on parameter `enabled`"""
122 123 user = User.get_default_user()
123 124 user.active = enabled
124 125 Session().add(user)
125 126 Session().commit()
126 127 time.sleep(1.5) # must sleep for cache (1s to expire)
127 128 log.info('anonymous access is now: %s', enabled)
128 129 assert enabled == User.get_default_user().active, (
129 130 'Cannot set anonymous access')
130 131
131 132
132 133 def check_xfail_backends(node, backend_alias):
133 134 # Using "xfail_backends" here intentionally, since this marks work
134 135 # which is "to be done" soon.
135 136 skip_marker = node.get_marker('xfail_backends')
136 137 if skip_marker and backend_alias in skip_marker.args:
137 138 msg = "Support for backend %s to be developed." % (backend_alias, )
138 139 msg = skip_marker.kwargs.get('reason', msg)
139 140 pytest.xfail(msg)
140 141
141 142
142 143 def check_skip_backends(node, backend_alias):
143 144 # Using "skip_backends" here intentionally, since this marks work which is
144 145 # not supported.
145 146 skip_marker = node.get_marker('skip_backends')
146 147 if skip_marker and backend_alias in skip_marker.args:
147 148 msg = "Feature not supported for backend %s." % (backend_alias, )
148 149 msg = skip_marker.kwargs.get('reason', msg)
149 150 pytest.skip(msg)
150 151
151 152
152 153 def extract_git_repo_from_dump(dump_name, repo_name):
153 154 """Create git repo `repo_name` from dump `dump_name`."""
154 155 repos_path = ScmModel().repos_path
155 156 target_path = os.path.join(repos_path, repo_name)
156 157 rc_testdata.extract_git_dump(dump_name, target_path)
157 158 return target_path
158 159
159 160
160 161 def extract_hg_repo_from_dump(dump_name, repo_name):
161 162 """Create hg repo `repo_name` from dump `dump_name`."""
162 163 repos_path = ScmModel().repos_path
163 164 target_path = os.path.join(repos_path, repo_name)
164 165 rc_testdata.extract_hg_dump(dump_name, target_path)
165 166 return target_path
166 167
167 168
168 169 def extract_svn_repo_from_dump(dump_name, repo_name):
169 170 """Create a svn repo `repo_name` from dump `dump_name`."""
170 171 repos_path = ScmModel().repos_path
171 172 target_path = os.path.join(repos_path, repo_name)
172 173 SubversionRepository(target_path, create=True)
173 174 _load_svn_dump_into_repo(dump_name, target_path)
174 175 return target_path
175 176
176 177
177 178 def assert_message_in_log(log_records, message, levelno, module):
178 179 messages = [
179 180 r.message for r in log_records
180 181 if r.module == module and r.levelno == levelno
181 182 ]
182 183 assert message in messages
183 184
184 185
185 186 def _load_svn_dump_into_repo(dump_name, repo_path):
186 187 """
187 188 Utility to populate a svn repository with a named dump
188 189
189 190 Currently the dumps are in rc_testdata. They might later on be
190 191 integrated with the main repository once they stabilize more.
191 192 """
192 193 dump = rc_testdata.load_svn_dump(dump_name)
193 194 load_dump = subprocess32.Popen(
194 195 ['svnadmin', 'load', repo_path],
195 196 stdin=subprocess32.PIPE, stdout=subprocess32.PIPE,
196 197 stderr=subprocess32.PIPE)
197 198 out, err = load_dump.communicate(dump)
198 199 if load_dump.returncode != 0:
199 200 log.error("Output of load_dump command: %s", out)
200 201 log.error("Error output of load_dump command: %s", err)
201 202 raise Exception(
202 203 'Failed to load dump "%s" into repository at path "%s".'
203 204 % (dump_name, repo_path))
204 205
205 206
206 207 class AssertResponse(object):
207 208 """
208 209 Utility that helps to assert things about a given HTML response.
209 210 """
210 211
211 212 def __init__(self, response):
212 213 self.response = response
213 214
214 215 def get_imports(self):
215 216 return fromstring, tostring, CSSSelector
216 217
217 218 def one_element_exists(self, css_selector):
218 219 self.get_element(css_selector)
219 220
220 221 def no_element_exists(self, css_selector):
221 222 assert not self._get_elements(css_selector)
222 223
223 224 def element_equals_to(self, css_selector, expected_content):
224 225 element = self.get_element(css_selector)
225 226 element_text = self._element_to_string(element)
226 227 assert expected_content in element_text
227 228
228 229 def element_contains(self, css_selector, expected_content):
229 230 element = self.get_element(css_selector)
230 231 assert expected_content in element.text_content()
231 232
232 233 def element_value_contains(self, css_selector, expected_content):
233 234 element = self.get_element(css_selector)
234 235 assert expected_content in element.value
235 236
236 237 def contains_one_link(self, link_text, href):
237 238 fromstring, tostring, CSSSelector = self.get_imports()
238 239 doc = fromstring(self.response.body)
239 240 sel = CSSSelector('a[href]')
240 241 elements = [
241 242 e for e in sel(doc) if e.text_content().strip() == link_text]
242 243 assert len(elements) == 1, "Did not find link or found multiple links"
243 244 self._ensure_url_equal(elements[0].attrib.get('href'), href)
244 245
245 246 def contains_one_anchor(self, anchor_id):
246 247 fromstring, tostring, CSSSelector = self.get_imports()
247 248 doc = fromstring(self.response.body)
248 249 sel = CSSSelector('#' + anchor_id)
249 250 elements = sel(doc)
250 251 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
251 252
252 253 def _ensure_url_equal(self, found, expected):
253 254 assert _Url(found) == _Url(expected)
254 255
255 256 def get_element(self, css_selector):
256 257 elements = self._get_elements(css_selector)
257 258 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
258 259 return elements[0]
259 260
260 261 def get_elements(self, css_selector):
261 262 return self._get_elements(css_selector)
262 263
263 264 def _get_elements(self, css_selector):
264 265 fromstring, tostring, CSSSelector = self.get_imports()
265 266 doc = fromstring(self.response.body)
266 267 sel = CSSSelector(css_selector)
267 268 elements = sel(doc)
268 269 return elements
269 270
270 271 def _element_to_string(self, element):
271 272 fromstring, tostring, CSSSelector = self.get_imports()
272 273 return tostring(element)
273 274
274 275
275 276 class _Url(object):
276 277 """
277 278 A url object that can be compared with other url orbjects
278 279 without regard to the vagaries of encoding, escaping, and ordering
279 280 of parameters in query strings.
280 281
281 282 Inspired by
282 283 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
283 284 """
284 285
285 286 def __init__(self, url):
286 287 parts = urlparse(url)
287 288 _query = frozenset(parse_qsl(parts.query))
288 289 _path = unquote_plus(parts.path)
289 290 parts = parts._replace(query=_query, path=_path)
290 291 self.parts = parts
291 292
292 293 def __eq__(self, other):
293 294 return self.parts == other.parts
294 295
295 296 def __hash__(self):
296 297 return hash(self.parts)
297 298
298 299
299 300 def run_test_concurrently(times, raise_catched_exc=True):
300 301 """
301 302 Add this decorator to small pieces of code that you want to test
302 303 concurrently
303 304
304 305 ex:
305 306
306 307 @test_concurrently(25)
307 308 def my_test_function():
308 309 ...
309 310 """
310 311 def test_concurrently_decorator(test_func):
311 312 def wrapper(*args, **kwargs):
312 313 exceptions = []
313 314
314 315 def call_test_func():
315 316 try:
316 317 test_func(*args, **kwargs)
317 318 except Exception as e:
318 319 exceptions.append(e)
319 320 if raise_catched_exc:
320 321 raise
321 322 threads = []
322 323 for i in range(times):
323 324 threads.append(threading.Thread(target=call_test_func))
324 325 for t in threads:
325 326 t.start()
326 327 for t in threads:
327 328 t.join()
328 329 if exceptions:
329 330 raise Exception(
330 331 'test_concurrently intercepted %s exceptions: %s' % (
331 332 len(exceptions), exceptions))
332 333 return wrapper
333 334 return test_concurrently_decorator
334 335
335 336
336 337 def wait_for_url(url, timeout=10):
337 338 """
338 339 Wait until URL becomes reachable.
339 340
340 341 It polls the URL until the timeout is reached or it became reachable.
341 342 If will call to `py.test.fail` in case the URL is not reachable.
342 343 """
343 344 timeout = time.time() + timeout
344 345 last = 0
345 346 wait = 0.1
346 347
347 348 while timeout > last:
348 349 last = time.time()
349 350 if is_url_reachable(url):
350 351 break
351 352 elif (last + wait) > time.time():
352 353 # Go to sleep because not enough time has passed since last check.
353 354 time.sleep(wait)
354 355 else:
355 356 pytest.fail("Timeout while waiting for URL {}".format(url))
356 357
357 358
358 359 def is_url_reachable(url):
359 360 try:
360 361 urllib2.urlopen(url)
361 362 except urllib2.URLError:
362 363 return False
363 364 return True
364 365
365 366
366 367 def repo_on_filesystem(repo_name):
367 368 from rhodecode.lib import vcs
368 369 from rhodecode.tests import TESTS_TMP_PATH
369 370 repo = vcs.get_vcs_instance(
370 371 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
371 372 return repo is not None
372 373
373 374
374 375 def commit_change(
375 376 repo, filename, content, message, vcs_type, parent=None, newfile=False):
376 377 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
377 378
378 379 repo = Repository.get_by_repo_name(repo)
379 380 _commit = parent
380 381 if not parent:
381 382 _commit = EmptyCommit(alias=vcs_type)
382 383
383 384 if newfile:
384 385 nodes = {
385 386 filename: {
386 387 'content': content
387 388 }
388 389 }
389 390 commit = ScmModel().create_nodes(
390 391 user=TEST_USER_ADMIN_LOGIN, repo=repo,
391 392 message=message,
392 393 nodes=nodes,
393 394 parent_commit=_commit,
394 395 author=TEST_USER_ADMIN_LOGIN,
395 396 )
396 397 else:
397 398 commit = ScmModel().commit_change(
398 399 repo=repo.scm_instance(), repo_name=repo.repo_name,
399 400 commit=parent, user=TEST_USER_ADMIN_LOGIN,
400 401 author=TEST_USER_ADMIN_LOGIN,
401 402 message=message,
402 403 content=content,
403 404 f_path=filename
404 405 )
405 406 return commit
406 407
407 408
408 409 def add_test_routes(config):
409 410 """
410 411 Adds test routing that can be used in different functional tests
411 412
412 413 """
413 414 config.add_route(name='home', pattern='/')
414 415 config.add_route(name='repo_summary', pattern='/{repo_name}')
415 416 config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
416 417 config.add_route(name='repo_group_home', pattern='/{repo_group_name}')
417 418
418 419 config.add_route(name='pullrequest_show',
419 420 pattern='/{repo_name}/pull-request/{pull_request_id}')
420 421 config.add_route(name='pull_requests_global',
421 422 pattern='/pull-request/{pull_request_id}')
422 423 config.add_route(name='repo_commit',
423 424 pattern='/{repo_name}/changeset/{commit_id}')
General Comments 0
You need to be logged in to leave comments. Login now