dependencies: bumped pyramid-debugtoolbar to 4.3.1
marcink
r1907:d622a0b9 default
@@ -1,2086 +1,2086 @@
1 1 # Generated by pip2nix 0.4.0
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 {
5 5 Babel = super.buildPythonPackage {
6 6 name = "Babel-1.3";
7 7 buildInputs = with self; [];
8 8 doCheck = false;
9 9 propagatedBuildInputs = with self; [pytz];
10 10 src = fetchurl {
11 11 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
12 12 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 16 };
17 17 };
18 18 Beaker = super.buildPythonPackage {
19 19 name = "Beaker-1.9.0";
20 20 buildInputs = with self; [];
21 21 doCheck = false;
22 22 propagatedBuildInputs = with self; [funcsigs];
23 23 src = fetchurl {
24 24 url = "https://pypi.python.org/packages/93/b2/12de6937b06e9615dbb3cb3a1c9af17f133f435bdef59f4ad42032b6eb49/Beaker-1.9.0.tar.gz";
25 25 md5 = "38b3fcdfa24faf97c6cf66991eb54e9c";
26 26 };
27 27 meta = {
28 28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 29 };
30 30 };
31 31 CProfileV = super.buildPythonPackage {
32 32 name = "CProfileV-1.0.7";
33 33 buildInputs = with self; [];
34 34 doCheck = false;
35 35 propagatedBuildInputs = with self; [bottle];
36 36 src = fetchurl {
37 37 url = "https://pypi.python.org/packages/df/50/d8c1ada7d537c64b0f76453fa31dedb6af6e27b82fcf0331e5f71a4cf98b/CProfileV-1.0.7.tar.gz";
38 38 md5 = "db4c7640438aa3d8887e194c81c7a019";
39 39 };
40 40 meta = {
41 41 license = [ pkgs.lib.licenses.mit ];
42 42 };
43 43 };
44 44 Chameleon = super.buildPythonPackage {
45 45 name = "Chameleon-2.24";
46 46 buildInputs = with self; [];
47 47 doCheck = false;
48 48 propagatedBuildInputs = with self; [];
49 49 src = fetchurl {
50 50 url = "https://pypi.python.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
51 51 md5 = "1b01f1f6533a8a11d0d2f2366dec5342";
52 52 };
53 53 meta = {
54 54 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
55 55 };
56 56 };
57 57 FormEncode = super.buildPythonPackage {
58 58 name = "FormEncode-1.2.4";
59 59 buildInputs = with self; [];
60 60 doCheck = false;
61 61 propagatedBuildInputs = with self; [];
62 62 src = fetchurl {
63 63 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
64 64 md5 = "6bc17fb9aed8aea198975e888e2077f4";
65 65 };
66 66 meta = {
67 67 license = [ pkgs.lib.licenses.psfl ];
68 68 };
69 69 };
70 70 Jinja2 = super.buildPythonPackage {
71 71 name = "Jinja2-2.7.3";
72 72 buildInputs = with self; [];
73 73 doCheck = false;
74 74 propagatedBuildInputs = with self; [MarkupSafe];
75 75 src = fetchurl {
76 76 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
77 77 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
78 78 };
79 79 meta = {
80 80 license = [ pkgs.lib.licenses.bsdOriginal ];
81 81 };
82 82 };
83 83 Mako = super.buildPythonPackage {
84 84 name = "Mako-1.0.6";
85 85 buildInputs = with self; [];
86 86 doCheck = false;
87 87 propagatedBuildInputs = with self; [MarkupSafe];
88 88 src = fetchurl {
89 89 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
90 90 md5 = "a28e22a339080316b2acc352b9ee631c";
91 91 };
92 92 meta = {
93 93 license = [ pkgs.lib.licenses.mit ];
94 94 };
95 95 };
96 96 Markdown = super.buildPythonPackage {
97 97 name = "Markdown-2.6.8";
98 98 buildInputs = with self; [];
99 99 doCheck = false;
100 100 propagatedBuildInputs = with self; [];
101 101 src = fetchurl {
102 102 url = "https://pypi.python.org/packages/1d/25/3f6d2cb31ec42ca5bd3bfbea99b63892b735d76e26f20dd2dcc34ffe4f0d/Markdown-2.6.8.tar.gz";
103 103 md5 = "d9ef057a5bd185f6f536400a31fc5d45";
104 104 };
105 105 meta = {
106 106 license = [ pkgs.lib.licenses.bsdOriginal ];
107 107 };
108 108 };
109 109 MarkupSafe = super.buildPythonPackage {
110 110 name = "MarkupSafe-0.23";
111 111 buildInputs = with self; [];
112 112 doCheck = false;
113 113 propagatedBuildInputs = with self; [];
114 114 src = fetchurl {
115 115 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
116 116 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
117 117 };
118 118 meta = {
119 119 license = [ pkgs.lib.licenses.bsdOriginal ];
120 120 };
121 121 };
122 122 MySQL-python = super.buildPythonPackage {
123 123 name = "MySQL-python-1.2.5";
124 124 buildInputs = with self; [];
125 125 doCheck = false;
126 126 propagatedBuildInputs = with self; [];
127 127 src = fetchurl {
128 128 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
129 129 md5 = "654f75b302db6ed8dc5a898c625e030c";
130 130 };
131 131 meta = {
132 132 license = [ pkgs.lib.licenses.gpl1 ];
133 133 };
134 134 };
135 135 Paste = super.buildPythonPackage {
136 136 name = "Paste-2.0.3";
137 137 buildInputs = with self; [];
138 138 doCheck = false;
139 139 propagatedBuildInputs = with self; [six];
140 140 src = fetchurl {
141 141 url = "https://pypi.python.org/packages/30/c3/5c2f7c7a02e4f58d4454353fa1c32c94f79fa4e36d07a67c0ac295ea369e/Paste-2.0.3.tar.gz";
142 142 md5 = "1231e14eae62fa7ed76e9130b04bc61e";
143 143 };
144 144 meta = {
145 145 license = [ pkgs.lib.licenses.mit ];
146 146 };
147 147 };
148 148 PasteDeploy = super.buildPythonPackage {
149 149 name = "PasteDeploy-1.5.2";
150 150 buildInputs = with self; [];
151 151 doCheck = false;
152 152 propagatedBuildInputs = with self; [];
153 153 src = fetchurl {
154 154 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
155 155 md5 = "352b7205c78c8de4987578d19431af3b";
156 156 };
157 157 meta = {
158 158 license = [ pkgs.lib.licenses.mit ];
159 159 };
160 160 };
161 161 PasteScript = super.buildPythonPackage {
162 162 name = "PasteScript-1.7.5";
163 163 buildInputs = with self; [];
164 164 doCheck = false;
165 165 propagatedBuildInputs = with self; [Paste PasteDeploy];
166 166 src = fetchurl {
167 167 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
168 168 md5 = "4c72d78dcb6bb993f30536842c16af4d";
169 169 };
170 170 meta = {
171 171 license = [ pkgs.lib.licenses.mit ];
172 172 };
173 173 };
174 174 Pygments = super.buildPythonPackage {
175 175 name = "Pygments-2.2.0";
176 176 buildInputs = with self; [];
177 177 doCheck = false;
178 178 propagatedBuildInputs = with self; [];
179 179 src = fetchurl {
180 180 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
181 181 md5 = "13037baca42f16917cbd5ad2fab50844";
182 182 };
183 183 meta = {
184 184 license = [ pkgs.lib.licenses.bsdOriginal ];
185 185 };
186 186 };
187 187 Pylons = super.buildPythonPackage {
188 188 name = "Pylons-1.0.2.dev20170630";
189 189 buildInputs = with self; [];
190 190 doCheck = false;
191 191 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
192 192 src = fetchurl {
193 193 url = "https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f";
194 194 md5 = "f26633726fa2cd3a340316ee6a5d218f";
195 195 };
196 196 meta = {
197 197 license = [ pkgs.lib.licenses.bsdOriginal ];
198 198 };
199 199 };
200 200 Routes = super.buildPythonPackage {
201 201 name = "Routes-1.13";
202 202 buildInputs = with self; [];
203 203 doCheck = false;
204 204 propagatedBuildInputs = with self; [repoze.lru];
205 205 src = fetchurl {
206 206 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
207 207 md5 = "d527b0ab7dd9172b1275a41f97448783";
208 208 };
209 209 meta = {
210 210 license = [ pkgs.lib.licenses.bsdOriginal ];
211 211 };
212 212 };
213 213 SQLAlchemy = super.buildPythonPackage {
214 214 name = "SQLAlchemy-0.9.9";
215 215 buildInputs = with self; [];
216 216 doCheck = false;
217 217 propagatedBuildInputs = with self; [];
218 218 src = fetchurl {
219 219 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
220 220 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
221 221 };
222 222 meta = {
223 223 license = [ pkgs.lib.licenses.mit ];
224 224 };
225 225 };
226 226 Sphinx = super.buildPythonPackage {
227 227 name = "Sphinx-1.2.2";
228 228 buildInputs = with self; [];
229 229 doCheck = false;
230 230 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
231 231 src = fetchurl {
232 232 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
233 233 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
234 234 };
235 235 meta = {
236 236 license = [ pkgs.lib.licenses.bsdOriginal ];
237 237 };
238 238 };
239 239 Tempita = super.buildPythonPackage {
240 240 name = "Tempita-0.5.2";
241 241 buildInputs = with self; [];
242 242 doCheck = false;
243 243 propagatedBuildInputs = with self; [];
244 244 src = fetchurl {
245 245 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
246 246 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
247 247 };
248 248 meta = {
249 249 license = [ pkgs.lib.licenses.mit ];
250 250 };
251 251 };
252 252 URLObject = super.buildPythonPackage {
253 253 name = "URLObject-2.4.0";
254 254 buildInputs = with self; [];
255 255 doCheck = false;
256 256 propagatedBuildInputs = with self; [];
257 257 src = fetchurl {
258 258 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
259 259 md5 = "2ed819738a9f0a3051f31dc9924e3065";
260 260 };
261 261 meta = {
262 262 license = [ ];
263 263 };
264 264 };
265 265 WebError = super.buildPythonPackage {
266 266 name = "WebError-0.10.3";
267 267 buildInputs = with self; [];
268 268 doCheck = false;
269 269 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
270 270 src = fetchurl {
271 271 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
272 272 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
273 273 };
274 274 meta = {
275 275 license = [ pkgs.lib.licenses.mit ];
276 276 };
277 277 };
278 278 WebHelpers = super.buildPythonPackage {
279 279 name = "WebHelpers-1.3";
280 280 buildInputs = with self; [];
281 281 doCheck = false;
282 282 propagatedBuildInputs = with self; [MarkupSafe];
283 283 src = fetchurl {
284 284 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
285 285 md5 = "32749ffadfc40fea51075a7def32588b";
286 286 };
287 287 meta = {
288 288 license = [ pkgs.lib.licenses.bsdOriginal ];
289 289 };
290 290 };
291 291 WebHelpers2 = super.buildPythonPackage {
292 292 name = "WebHelpers2-2.0";
293 293 buildInputs = with self; [];
294 294 doCheck = false;
295 295 propagatedBuildInputs = with self; [MarkupSafe six];
296 296 src = fetchurl {
297 297 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
298 298 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
299 299 };
300 300 meta = {
301 301 license = [ pkgs.lib.licenses.mit ];
302 302 };
303 303 };
304 304 WebOb = super.buildPythonPackage {
305 305 name = "WebOb-1.7.3";
306 306 buildInputs = with self; [];
307 307 doCheck = false;
308 308 propagatedBuildInputs = with self; [];
309 309 src = fetchurl {
310 310 url = "https://pypi.python.org/packages/46/87/2f96d8d43b2078fae6e1d33fa86b95c228cebed060f4e3c7576cc44ea83b/WebOb-1.7.3.tar.gz";
311 311 md5 = "350028baffc508e3d23c078118e35316";
312 312 };
313 313 meta = {
314 314 license = [ pkgs.lib.licenses.mit ];
315 315 };
316 316 };
317 317 WebTest = super.buildPythonPackage {
318 318 name = "WebTest-2.0.27";
319 319 buildInputs = with self; [];
320 320 doCheck = false;
321 321 propagatedBuildInputs = with self; [six WebOb waitress beautifulsoup4];
322 322 src = fetchurl {
323 323 url = "https://pypi.python.org/packages/80/fa/ca3a759985c72e3a124cbca3e1f8a2e931a07ffd31fd45d8f7bf21cb95cf/WebTest-2.0.27.tar.gz";
324 324 md5 = "54e6515ac71c51b6fc90179483c749ad";
325 325 };
326 326 meta = {
327 327 license = [ pkgs.lib.licenses.mit ];
328 328 };
329 329 };
330 330 Whoosh = super.buildPythonPackage {
331 331 name = "Whoosh-2.7.4";
332 332 buildInputs = with self; [];
333 333 doCheck = false;
334 334 propagatedBuildInputs = with self; [];
335 335 src = fetchurl {
336 336 url = "https://pypi.python.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
337 337 md5 = "c2710105f20b3e29936bd2357383c325";
338 338 };
339 339 meta = {
340 340 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
341 341 };
342 342 };
343 343 alembic = super.buildPythonPackage {
344 344 name = "alembic-0.9.2";
345 345 buildInputs = with self; [];
346 346 doCheck = false;
347 347 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor python-dateutil];
348 348 src = fetchurl {
349 349 url = "https://pypi.python.org/packages/78/48/b5b26e7218b415f40b60b92c53853d242e5456c0f19f6c66101d98ff5f2a/alembic-0.9.2.tar.gz";
350 350 md5 = "40daf8bae50969beea40efaaf0839ff4";
351 351 };
352 352 meta = {
353 353 license = [ pkgs.lib.licenses.mit ];
354 354 };
355 355 };
356 356 amqplib = super.buildPythonPackage {
357 357 name = "amqplib-1.0.2";
358 358 buildInputs = with self; [];
359 359 doCheck = false;
360 360 propagatedBuildInputs = with self; [];
361 361 src = fetchurl {
362 362 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
363 363 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
364 364 };
365 365 meta = {
366 366 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
367 367 };
368 368 };
369 369 anyjson = super.buildPythonPackage {
370 370 name = "anyjson-0.3.3";
371 371 buildInputs = with self; [];
372 372 doCheck = false;
373 373 propagatedBuildInputs = with self; [];
374 374 src = fetchurl {
375 375 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
376 376 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
377 377 };
378 378 meta = {
379 379 license = [ pkgs.lib.licenses.bsdOriginal ];
380 380 };
381 381 };
382 382 appenlight-client = super.buildPythonPackage {
383 383 name = "appenlight-client-0.6.21";
384 384 buildInputs = with self; [];
385 385 doCheck = false;
386 386 propagatedBuildInputs = with self; [WebOb requests six];
387 387 src = fetchurl {
388 388 url = "https://pypi.python.org/packages/c9/23/91b66cfa0b963662c10b2a06ccaadf3f3a4848a7a2aa16255cb43d5160ec/appenlight_client-0.6.21.tar.gz";
389 389 md5 = "273999ac854fdaefa8d0fb61965a4ed9";
390 390 };
391 391 meta = {
392 392 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
393 393 };
394 394 };
395 395 authomatic = super.buildPythonPackage {
396 396 name = "authomatic-0.1.0.post1";
397 397 buildInputs = with self; [];
398 398 doCheck = false;
399 399 propagatedBuildInputs = with self; [];
400 400 src = fetchurl {
401 401 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
402 402 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
403 403 };
404 404 meta = {
405 405 license = [ pkgs.lib.licenses.mit ];
406 406 };
407 407 };
408 backport-ipaddress = super.buildPythonPackage {
409 name = "backport-ipaddress-0.1";
410 buildInputs = with self; [];
411 doCheck = false;
412 propagatedBuildInputs = with self; [];
413 src = fetchurl {
414 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
415 md5 = "9c1f45f4361f71b124d7293a60006c05";
416 };
417 meta = {
418 license = [ pkgs.lib.licenses.psfl ];
419 };
420 };
421 408 backports.shutil-get-terminal-size = super.buildPythonPackage {
422 409 name = "backports.shutil-get-terminal-size-1.0.0";
423 410 buildInputs = with self; [];
424 411 doCheck = false;
425 412 propagatedBuildInputs = with self; [];
426 413 src = fetchurl {
427 414 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
428 415 md5 = "03267762480bd86b50580dc19dff3c66";
429 416 };
430 417 meta = {
431 418 license = [ pkgs.lib.licenses.mit ];
432 419 };
433 420 };
434 421 beautifulsoup4 = super.buildPythonPackage {
435 422 name = "beautifulsoup4-4.6.0";
436 423 buildInputs = with self; [];
437 424 doCheck = false;
438 425 propagatedBuildInputs = with self; [];
439 426 src = fetchurl {
440 427 url = "https://pypi.python.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
441 428 md5 = "c17714d0f91a23b708a592cb3c697728";
442 429 };
443 430 meta = {
444 431 license = [ pkgs.lib.licenses.mit ];
445 432 };
446 433 };
447 434 bleach = super.buildPythonPackage {
448 435 name = "bleach-1.5.0";
449 436 buildInputs = with self; [];
450 437 doCheck = false;
451 438 propagatedBuildInputs = with self; [six html5lib];
452 439 src = fetchurl {
453 440 url = "https://pypi.python.org/packages/99/00/25a8fce4de102bf6e3cc76bc4ea60685b2fee33bde1b34830c70cacc26a7/bleach-1.5.0.tar.gz";
454 441 md5 = "b663300efdf421b3b727b19d7be9c7e7";
455 442 };
456 443 meta = {
457 444 license = [ pkgs.lib.licenses.asl20 ];
458 445 };
459 446 };
460 447 bottle = super.buildPythonPackage {
461 448 name = "bottle-0.12.8";
462 449 buildInputs = with self; [];
463 450 doCheck = false;
464 451 propagatedBuildInputs = with self; [];
465 452 src = fetchurl {
466 453 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
467 454 md5 = "13132c0a8f607bf860810a6ee9064c5b";
468 455 };
469 456 meta = {
470 457 license = [ pkgs.lib.licenses.mit ];
471 458 };
472 459 };
473 460 bumpversion = super.buildPythonPackage {
474 461 name = "bumpversion-0.5.3";
475 462 buildInputs = with self; [];
476 463 doCheck = false;
477 464 propagatedBuildInputs = with self; [];
478 465 src = fetchurl {
479 466 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
480 467 md5 = "c66a3492eafcf5ad4b024be9fca29820";
481 468 };
482 469 meta = {
483 470 license = [ pkgs.lib.licenses.mit ];
484 471 };
485 472 };
486 473 celery = super.buildPythonPackage {
487 474 name = "celery-2.2.10";
488 475 buildInputs = with self; [];
489 476 doCheck = false;
490 477 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
491 478 src = fetchurl {
492 479 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
493 480 md5 = "898bc87e54f278055b561316ba73e222";
494 481 };
495 482 meta = {
496 483 license = [ pkgs.lib.licenses.bsdOriginal ];
497 484 };
498 485 };
499 486 channelstream = super.buildPythonPackage {
500 487 name = "channelstream-0.5.2";
501 488 buildInputs = with self; [];
502 489 doCheck = false;
503 490 propagatedBuildInputs = with self; [gevent ws4py pyramid pyramid-jinja2 itsdangerous requests six];
504 491 src = fetchurl {
505 492 url = "https://pypi.python.org/packages/2b/31/29a8e085cf5bf97fa88e7b947adabfc581a18a3463adf77fb6dada34a65f/channelstream-0.5.2.tar.gz";
506 493 md5 = "1c5eb2a8a405be6f1073da94da6d81d3";
507 494 };
508 495 meta = {
509 496 license = [ pkgs.lib.licenses.bsdOriginal ];
510 497 };
511 498 };
512 499 click = super.buildPythonPackage {
513 500 name = "click-5.1";
514 501 buildInputs = with self; [];
515 502 doCheck = false;
516 503 propagatedBuildInputs = with self; [];
517 504 src = fetchurl {
518 505 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
519 506 md5 = "9c5323008cccfe232a8b161fc8196d41";
520 507 };
521 508 meta = {
522 509 license = [ pkgs.lib.licenses.bsdOriginal ];
523 510 };
524 511 };
525 512 colander = super.buildPythonPackage {
526 513 name = "colander-1.3.3";
527 514 buildInputs = with self; [];
528 515 doCheck = false;
529 516 propagatedBuildInputs = with self; [translationstring iso8601];
530 517 src = fetchurl {
531 518 url = "https://pypi.python.org/packages/54/a9/9862a561e015b2c7b56404c0b13828a8bdc51e05ab3703bd792cec064487/colander-1.3.3.tar.gz";
532 519 md5 = "f5d783768c51d73695f49bbe95778ab4";
533 520 };
534 521 meta = {
535 522 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
536 523 };
537 524 };
538 525 configobj = super.buildPythonPackage {
539 526 name = "configobj-5.0.6";
540 527 buildInputs = with self; [];
541 528 doCheck = false;
542 529 propagatedBuildInputs = with self; [six];
543 530 src = fetchurl {
544 531 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
545 532 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
546 533 };
547 534 meta = {
548 535 license = [ pkgs.lib.licenses.bsdOriginal ];
549 536 };
550 537 };
551 538 configparser = super.buildPythonPackage {
552 539 name = "configparser-3.5.0";
553 540 buildInputs = with self; [];
554 541 doCheck = false;
555 542 propagatedBuildInputs = with self; [];
556 543 src = fetchurl {
557 544 url = "https://pypi.python.org/packages/7c/69/c2ce7e91c89dc073eb1aa74c0621c3eefbffe8216b3f9af9d3885265c01c/configparser-3.5.0.tar.gz";
558 545 md5 = "cfdd915a5b7a6c09917a64a573140538";
559 546 };
560 547 meta = {
561 548 license = [ pkgs.lib.licenses.mit ];
562 549 };
563 550 };
564 551 cov-core = super.buildPythonPackage {
565 552 name = "cov-core-1.15.0";
566 553 buildInputs = with self; [];
567 554 doCheck = false;
568 555 propagatedBuildInputs = with self; [coverage];
569 556 src = fetchurl {
570 557 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
571 558 md5 = "f519d4cb4c4e52856afb14af52919fe6";
572 559 };
573 560 meta = {
574 561 license = [ pkgs.lib.licenses.mit ];
575 562 };
576 563 };
577 564 coverage = super.buildPythonPackage {
578 565 name = "coverage-3.7.1";
579 566 buildInputs = with self; [];
580 567 doCheck = false;
581 568 propagatedBuildInputs = with self; [];
582 569 src = fetchurl {
583 570 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
584 571 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
585 572 };
586 573 meta = {
587 574 license = [ pkgs.lib.licenses.bsdOriginal ];
588 575 };
589 576 };
590 577 cssselect = super.buildPythonPackage {
591 578 name = "cssselect-1.0.1";
592 579 buildInputs = with self; [];
593 580 doCheck = false;
594 581 propagatedBuildInputs = with self; [];
595 582 src = fetchurl {
596 583 url = "https://pypi.python.org/packages/77/ff/9c865275cd19290feba56344eba570e719efb7ca5b34d67ed12b22ebbb0d/cssselect-1.0.1.tar.gz";
597 584 md5 = "3fa03bf82a9f0b1223c0f1eb1369e139";
598 585 };
599 586 meta = {
600 587 license = [ pkgs.lib.licenses.bsdOriginal ];
601 588 };
602 589 };
603 590 decorator = super.buildPythonPackage {
604 591 name = "decorator-4.0.11";
605 592 buildInputs = with self; [];
606 593 doCheck = false;
607 594 propagatedBuildInputs = with self; [];
608 595 src = fetchurl {
609 596 url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz";
610 597 md5 = "73644c8f0bd4983d1b6a34b49adec0ae";
611 598 };
612 599 meta = {
613 600 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
614 601 };
615 602 };
616 603 deform = super.buildPythonPackage {
617 604 name = "deform-2.0.4";
618 605 buildInputs = with self; [];
619 606 doCheck = false;
620 607 propagatedBuildInputs = with self; [Chameleon colander iso8601 peppercorn translationstring zope.deprecation];
621 608 src = fetchurl {
622 609 url = "https://pypi.python.org/packages/66/3b/eefcb07abcab7a97f6665aa2d0cf1af741d9d6e78a2e4657fd2b89f89880/deform-2.0.4.tar.gz";
623 610 md5 = "34756e42cf50dd4b4430809116c4ec0a";
624 611 };
625 612 meta = {
626 613 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
627 614 };
628 615 };
629 616 docutils = super.buildPythonPackage {
630 617 name = "docutils-0.13.1";
631 618 buildInputs = with self; [];
632 619 doCheck = false;
633 620 propagatedBuildInputs = with self; [];
634 621 src = fetchurl {
635 622 url = "https://pypi.python.org/packages/05/25/7b5484aca5d46915493f1fd4ecb63c38c333bd32aa9ad6e19da8d08895ae/docutils-0.13.1.tar.gz";
636 623 md5 = "ea4a893c633c788be9b8078b6b305d53";
637 624 };
638 625 meta = {
639 626 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
640 627 };
641 628 };
642 629 dogpile.cache = super.buildPythonPackage {
643 630 name = "dogpile.cache-0.6.4";
644 631 buildInputs = with self; [];
645 632 doCheck = false;
646 633 propagatedBuildInputs = with self; [];
647 634 src = fetchurl {
648 635 url = "https://pypi.python.org/packages/b6/3d/35c05ca01c070bb70d9d422f2c4858ecb021b05b21af438fec5ccd7b945c/dogpile.cache-0.6.4.tar.gz";
649 636 md5 = "66e0a6cae6c08cb1ea25f89d0eadfeb0";
650 637 };
651 638 meta = {
652 639 license = [ pkgs.lib.licenses.bsdOriginal ];
653 640 };
654 641 };
655 642 dogpile.core = super.buildPythonPackage {
656 643 name = "dogpile.core-0.4.1";
657 644 buildInputs = with self; [];
658 645 doCheck = false;
659 646 propagatedBuildInputs = with self; [];
660 647 src = fetchurl {
661 648 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
662 649 md5 = "01cb19f52bba3e95c9b560f39341f045";
663 650 };
664 651 meta = {
665 652 license = [ pkgs.lib.licenses.bsdOriginal ];
666 653 };
667 654 };
668 655 ecdsa = super.buildPythonPackage {
669 656 name = "ecdsa-0.11";
670 657 buildInputs = with self; [];
671 658 doCheck = false;
672 659 propagatedBuildInputs = with self; [];
673 660 src = fetchurl {
674 661 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
675 662 md5 = "8ef586fe4dbb156697d756900cb41d7c";
676 663 };
677 664 meta = {
678 665 license = [ pkgs.lib.licenses.mit ];
679 666 };
680 667 };
681 668 elasticsearch = super.buildPythonPackage {
682 669 name = "elasticsearch-2.3.0";
683 670 buildInputs = with self; [];
684 671 doCheck = false;
685 672 propagatedBuildInputs = with self; [urllib3];
686 673 src = fetchurl {
687 674 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
688 675 md5 = "2550f3b51629cf1ef9636608af92c340";
689 676 };
690 677 meta = {
691 678 license = [ pkgs.lib.licenses.asl20 ];
692 679 };
693 680 };
694 681 elasticsearch-dsl = super.buildPythonPackage {
695 682 name = "elasticsearch-dsl-2.2.0";
696 683 buildInputs = with self; [];
697 684 doCheck = false;
698 685 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
699 686 src = fetchurl {
700 687 url = "https://pypi.python.org/packages/66/2f/52a086968788e58461641570f45c3207a52d46ebbe9b77dc22b6a8ffda66/elasticsearch-dsl-2.2.0.tar.gz";
701 688 md5 = "fa6bd3c87ea3caa8f0f051bc37c53221";
702 689 };
703 690 meta = {
704 691 license = [ pkgs.lib.licenses.asl20 ];
705 692 };
706 693 };
707 694 entrypoints = super.buildPythonPackage {
708 695 name = "entrypoints-0.2.2";
709 696 buildInputs = with self; [];
710 697 doCheck = false;
711 698 propagatedBuildInputs = with self; [configparser];
712 699 src = fetchurl {
713 700 url = "https://code.rhodecode.com/upstream/entrypoints/archive/96e6d645684e1af3d7df5b5272f3fe85a546b233.tar.gz?md5=7db37771aea9ac9fefe093e5d6987313";
714 701 md5 = "7db37771aea9ac9fefe093e5d6987313";
715 702 };
716 703 meta = {
717 704 license = [ pkgs.lib.licenses.mit ];
718 705 };
719 706 };
720 707 enum34 = super.buildPythonPackage {
721 708 name = "enum34-1.1.6";
722 709 buildInputs = with self; [];
723 710 doCheck = false;
724 711 propagatedBuildInputs = with self; [];
725 712 src = fetchurl {
726 713 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
727 714 md5 = "5f13a0841a61f7fc295c514490d120d0";
728 715 };
729 716 meta = {
730 717 license = [ pkgs.lib.licenses.bsdOriginal ];
731 718 };
732 719 };
733 720 funcsigs = super.buildPythonPackage {
734 721 name = "funcsigs-1.0.2";
735 722 buildInputs = with self; [];
736 723 doCheck = false;
737 724 propagatedBuildInputs = with self; [];
738 725 src = fetchurl {
739 726 url = "https://pypi.python.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
740 727 md5 = "7e583285b1fb8a76305d6d68f4ccc14e";
741 728 };
742 729 meta = {
743 730 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
744 731 };
745 732 };
746 733 functools32 = super.buildPythonPackage {
747 734 name = "functools32-3.2.3.post2";
748 735 buildInputs = with self; [];
749 736 doCheck = false;
750 737 propagatedBuildInputs = with self; [];
751 738 src = fetchurl {
752 739 url = "https://pypi.python.org/packages/5e/1a/0aa2c8195a204a9f51284018562dea77e25511f02fe924fac202fc012172/functools32-3.2.3-2.zip";
753 740 md5 = "d55232eb132ec779e6893c902a0bc5ad";
754 741 };
755 742 meta = {
756 743 license = [ pkgs.lib.licenses.psfl ];
757 744 };
758 745 };
759 746 future = super.buildPythonPackage {
760 747 name = "future-0.14.3";
761 748 buildInputs = with self; [];
762 749 doCheck = false;
763 750 propagatedBuildInputs = with self; [];
764 751 src = fetchurl {
765 752 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
766 753 md5 = "e94079b0bd1fc054929e8769fc0f6083";
767 754 };
768 755 meta = {
769 756 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
770 757 };
771 758 };
772 759 futures = super.buildPythonPackage {
773 760 name = "futures-3.0.2";
774 761 buildInputs = with self; [];
775 762 doCheck = false;
776 763 propagatedBuildInputs = with self; [];
777 764 src = fetchurl {
778 765 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
779 766 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
780 767 };
781 768 meta = {
782 769 license = [ pkgs.lib.licenses.bsdOriginal ];
783 770 };
784 771 };
785 772 gevent = super.buildPythonPackage {
786 773 name = "gevent-1.2.2";
787 774 buildInputs = with self; [];
788 775 doCheck = false;
789 776 propagatedBuildInputs = with self; [greenlet];
790 777 src = fetchurl {
791 778 url = "https://pypi.python.org/packages/1b/92/b111f76e54d2be11375b47b213b56687214f258fd9dae703546d30b837be/gevent-1.2.2.tar.gz";
792 779 md5 = "7f0baf355384fe5ff2ecf66853422554";
793 780 };
794 781 meta = {
795 782 license = [ pkgs.lib.licenses.mit ];
796 783 };
797 784 };
798 785 gnureadline = super.buildPythonPackage {
799 786 name = "gnureadline-6.3.3";
800 787 buildInputs = with self; [];
801 788 doCheck = false;
802 789 propagatedBuildInputs = with self; [];
803 790 src = fetchurl {
804 791 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
805 792 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
806 793 };
807 794 meta = {
808 795 license = [ pkgs.lib.licenses.gpl1 ];
809 796 };
810 797 };
811 798 gprof2dot = super.buildPythonPackage {
812 799 name = "gprof2dot-2016.10.13";
813 800 buildInputs = with self; [];
814 801 doCheck = false;
815 802 propagatedBuildInputs = with self; [];
816 803 src = fetchurl {
817 804 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
818 805 md5 = "0125401f15fd2afe1df686a76c64a4fd";
819 806 };
820 807 meta = {
821 808 license = [ { fullName = "LGPL"; } ];
822 809 };
823 810 };
824 811 graphviz = super.buildPythonPackage {
825 name = "graphviz-0.7.1";
812 name = "graphviz-0.8";
826 813 buildInputs = with self; [];
827 814 doCheck = false;
828 815 propagatedBuildInputs = with self; [];
829 816 src = fetchurl {
830 url = "https://pypi.python.org/packages/7d/2d/f5cfa56467ca5a65eb44e1103d89d2f65dbc4f04cf7a1f3d38e973c3d1a8/graphviz-0.7.1.zip";
831 md5 = "d5926e89975121d56dec777a79bfc9d1";
817 url = "https://pypi.python.org/packages/da/84/0e997520323d6b01124eb01c68d5c101814d0aab53083cd62bd75a90f70b/graphviz-0.8.zip";
818 md5 = "9486a885360a5ee54a81eb2950470c71";
832 819 };
833 820 meta = {
834 821 license = [ pkgs.lib.licenses.mit ];
835 822 };
836 823 };
837 824 greenlet = super.buildPythonPackage {
838 825 name = "greenlet-0.4.12";
839 826 buildInputs = with self; [];
840 827 doCheck = false;
841 828 propagatedBuildInputs = with self; [];
842 829 src = fetchurl {
843 830 url = "https://pypi.python.org/packages/be/76/82af375d98724054b7e273b5d9369346937324f9bcc20980b45b068ef0b0/greenlet-0.4.12.tar.gz";
844 831 md5 = "e8637647d58a26c4a1f51ca393e53c00";
845 832 };
846 833 meta = {
847 834 license = [ pkgs.lib.licenses.mit ];
848 835 };
849 836 };
850 837 gunicorn = super.buildPythonPackage {
851 838 name = "gunicorn-19.7.1";
852 839 buildInputs = with self; [];
853 840 doCheck = false;
854 841 propagatedBuildInputs = with self; [];
855 842 src = fetchurl {
856 843 url = "https://pypi.python.org/packages/30/3a/10bb213cede0cc4d13ac2263316c872a64bf4c819000c8ccd801f1d5f822/gunicorn-19.7.1.tar.gz";
857 844 md5 = "174d3c3cd670a5be0404d84c484e590c";
858 845 };
859 846 meta = {
860 847 license = [ pkgs.lib.licenses.mit ];
861 848 };
862 849 };
863 850 html5lib = super.buildPythonPackage {
864 851 name = "html5lib-0.9999999";
865 852 buildInputs = with self; [];
866 853 doCheck = false;
867 854 propagatedBuildInputs = with self; [six];
868 855 src = fetchurl {
869 856 url = "https://pypi.python.org/packages/ae/ae/bcb60402c60932b32dfaf19bb53870b29eda2cd17551ba5639219fb5ebf9/html5lib-0.9999999.tar.gz";
870 857 md5 = "ef43cb05e9e799f25d65d1135838a96f";
871 858 };
872 859 meta = {
873 860 license = [ pkgs.lib.licenses.mit ];
874 861 };
875 862 };
876 863 infrae.cache = super.buildPythonPackage {
877 864 name = "infrae.cache-1.0.1";
878 865 buildInputs = with self; [];
879 866 doCheck = false;
880 867 propagatedBuildInputs = with self; [Beaker repoze.lru];
881 868 src = fetchurl {
882 869 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
883 870 md5 = "b09076a766747e6ed2a755cc62088e32";
884 871 };
885 872 meta = {
886 873 license = [ pkgs.lib.licenses.zpt21 ];
887 874 };
888 875 };
889 876 invoke = super.buildPythonPackage {
890 877 name = "invoke-0.13.0";
891 878 buildInputs = with self; [];
892 879 doCheck = false;
893 880 propagatedBuildInputs = with self; [];
894 881 src = fetchurl {
895 882 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
896 883 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
897 884 };
898 885 meta = {
899 886 license = [ pkgs.lib.licenses.bsdOriginal ];
900 887 };
901 888 };
889 ipaddress = super.buildPythonPackage {
890 name = "ipaddress-1.0.18";
891 buildInputs = with self; [];
892 doCheck = false;
893 propagatedBuildInputs = with self; [];
894 src = fetchurl {
895 url = "https://pypi.python.org/packages/4e/13/774faf38b445d0b3a844b65747175b2e0500164b7c28d78e34987a5bfe06/ipaddress-1.0.18.tar.gz";
896 md5 = "310c2dfd64eb6f0df44aa8c59f2334a7";
897 };
898 meta = {
899 license = [ pkgs.lib.licenses.psfl ];
900 };
901 };
902 902 ipdb = super.buildPythonPackage {
903 903 name = "ipdb-0.10.3";
904 904 buildInputs = with self; [];
905 905 doCheck = false;
906 906 propagatedBuildInputs = with self; [setuptools ipython];
907 907 src = fetchurl {
908 908 url = "https://pypi.python.org/packages/ad/cc/0e7298e1fbf2efd52667c9354a12aa69fb6f796ce230cca03525051718ef/ipdb-0.10.3.tar.gz";
909 909 md5 = "def1f6ac075d54bdee07e6501263d4fa";
910 910 };
911 911 meta = {
912 912 license = [ pkgs.lib.licenses.bsdOriginal ];
913 913 };
914 914 };
915 915 ipython = super.buildPythonPackage {
916 916 name = "ipython-5.1.0";
917 917 buildInputs = with self; [];
918 918 doCheck = false;
919 919 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit Pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
920 920 src = fetchurl {
921 921 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
922 922 md5 = "47c8122420f65b58784cb4b9b4af35e3";
923 923 };
924 924 meta = {
925 925 license = [ pkgs.lib.licenses.bsdOriginal ];
926 926 };
927 927 };
928 928 ipython-genutils = super.buildPythonPackage {
929 929 name = "ipython-genutils-0.2.0";
930 930 buildInputs = with self; [];
931 931 doCheck = false;
932 932 propagatedBuildInputs = with self; [];
933 933 src = fetchurl {
934 934 url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
935 935 md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
936 936 };
937 937 meta = {
938 938 license = [ pkgs.lib.licenses.bsdOriginal ];
939 939 };
940 940 };
941 941 iso8601 = super.buildPythonPackage {
942 942 name = "iso8601-0.1.11";
943 943 buildInputs = with self; [];
944 944 doCheck = false;
945 945 propagatedBuildInputs = with self; [];
946 946 src = fetchurl {
947 947 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
948 948 md5 = "b06d11cd14a64096f907086044f0fe38";
949 949 };
950 950 meta = {
951 951 license = [ pkgs.lib.licenses.mit ];
952 952 };
953 953 };
954 954 itsdangerous = super.buildPythonPackage {
955 955 name = "itsdangerous-0.24";
956 956 buildInputs = with self; [];
957 957 doCheck = false;
958 958 propagatedBuildInputs = with self; [];
959 959 src = fetchurl {
960 960 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
961 961 md5 = "a3d55aa79369aef5345c036a8a26307f";
962 962 };
963 963 meta = {
964 964 license = [ pkgs.lib.licenses.bsdOriginal ];
965 965 };
966 966 };
967 967 jsonschema = super.buildPythonPackage {
968 968 name = "jsonschema-2.6.0";
969 969 buildInputs = with self; [];
970 970 doCheck = false;
971 971 propagatedBuildInputs = with self; [functools32];
972 972 src = fetchurl {
973 973 url = "https://pypi.python.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
974 974 md5 = "50c6b69a373a8b55ff1e0ec6e78f13f4";
975 975 };
976 976 meta = {
977 977 license = [ pkgs.lib.licenses.mit ];
978 978 };
979 979 };
980 980 jupyter-client = super.buildPythonPackage {
981 981 name = "jupyter-client-5.0.0";
982 982 buildInputs = with self; [];
983 983 doCheck = false;
984 984 propagatedBuildInputs = with self; [traitlets jupyter-core pyzmq python-dateutil];
985 985 src = fetchurl {
986 986 url = "https://pypi.python.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
987 987 md5 = "1acd331b5c9fb4d79dae9939e79f2426";
988 988 };
989 989 meta = {
990 990 license = [ pkgs.lib.licenses.bsdOriginal ];
991 991 };
992 992 };
993 993 jupyter-core = super.buildPythonPackage {
994 994 name = "jupyter-core-4.3.0";
995 995 buildInputs = with self; [];
996 996 doCheck = false;
997 997 propagatedBuildInputs = with self; [traitlets];
998 998 src = fetchurl {
999 999 url = "https://pypi.python.org/packages/2f/39/5138f975100ce14d150938df48a83cd852a3fd8e24b1244f4113848e69e2/jupyter_core-4.3.0.tar.gz";
1000 1000 md5 = "18819511a809afdeed9a995a9c27bcfb";
1001 1001 };
1002 1002 meta = {
1003 1003 license = [ pkgs.lib.licenses.bsdOriginal ];
1004 1004 };
1005 1005 };
1006 1006 hupper = super.buildPythonPackage {
1007 1007 name = "hupper-1.0";
1008 1008 buildInputs = with self; [];
1009 1009 doCheck = false;
1010 1010 propagatedBuildInputs = with self; [];
1011 1011 src = fetchurl {
1012 1012 url = "https://pypi.python.org/packages/2e/07/df892c564dc09bb3cf6f6deb976c26adf9117db75ba218cb4353dbc9d826/hupper-1.0.tar.gz";
1013 1013 md5 = "26e77da7d5ac5858f59af050d1a6eb5a";
1014 1014 };
1015 1015 meta = {
1016 1016 license = [ pkgs.lib.licenses.mit ];
1017 1017 };
1018 1018 };
1019 1019 kombu = super.buildPythonPackage {
1020 1020 name = "kombu-1.5.1";
1021 1021 buildInputs = with self; [];
1022 1022 doCheck = false;
1023 1023 propagatedBuildInputs = with self; [anyjson amqplib];
1024 1024 src = fetchurl {
1025 1025 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
1026 1026 md5 = "50662f3c7e9395b3d0721fb75d100b63";
1027 1027 };
1028 1028 meta = {
1029 1029 license = [ pkgs.lib.licenses.bsdOriginal ];
1030 1030 };
1031 1031 };
1032 1032 lxml = super.buildPythonPackage {
1033 1033 name = "lxml-3.7.3";
1034 1034 buildInputs = with self; [];
1035 1035 doCheck = false;
1036 1036 propagatedBuildInputs = with self; [];
1037 1037 src = fetchurl {
1038 1038 url = "https://pypi.python.org/packages/39/e8/a8e0b1fa65dd021d48fe21464f71783655f39a41f218293c1c590d54eb82/lxml-3.7.3.tar.gz";
1039 1039 md5 = "075692ce442e69bbd604d44e21c02753";
1040 1040 };
1041 1041 meta = {
1042 1042 license = [ pkgs.lib.licenses.bsdOriginal ];
1043 1043 };
1044 1044 };
1045 1045 meld3 = super.buildPythonPackage {
1046 1046 name = "meld3-1.0.2";
1047 1047 buildInputs = with self; [];
1048 1048 doCheck = false;
1049 1049 propagatedBuildInputs = with self; [];
1050 1050 src = fetchurl {
1051 1051 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
1052 1052 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
1053 1053 };
1054 1054 meta = {
1055 1055 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1056 1056 };
1057 1057 };
1058 1058 mistune = super.buildPythonPackage {
1059 1059 name = "mistune-0.7.4";
1060 1060 buildInputs = with self; [];
1061 1061 doCheck = false;
1062 1062 propagatedBuildInputs = with self; [];
1063 1063 src = fetchurl {
1064 1064 url = "https://pypi.python.org/packages/25/a4/12a584c0c59c9fed529f8b3c47ca8217c0cf8bcc5e1089d3256410cfbdbc/mistune-0.7.4.tar.gz";
1065 1065 md5 = "92d01cb717e9e74429e9bde9d29ac43b";
1066 1066 };
1067 1067 meta = {
1068 1068 license = [ pkgs.lib.licenses.bsdOriginal ];
1069 1069 };
1070 1070 };
1071 1071 mock = super.buildPythonPackage {
1072 1072 name = "mock-1.0.1";
1073 1073 buildInputs = with self; [];
1074 1074 doCheck = false;
1075 1075 propagatedBuildInputs = with self; [];
1076 1076 src = fetchurl {
1077 1077 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
1078 1078 md5 = "869f08d003c289a97c1a6610faf5e913";
1079 1079 };
1080 1080 meta = {
1081 1081 license = [ pkgs.lib.licenses.bsdOriginal ];
1082 1082 };
1083 1083 };
1084 1084 msgpack-python = super.buildPythonPackage {
1085 1085 name = "msgpack-python-0.4.8";
1086 1086 buildInputs = with self; [];
1087 1087 doCheck = false;
1088 1088 propagatedBuildInputs = with self; [];
1089 1089 src = fetchurl {
1090 1090 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
1091 1091 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
1092 1092 };
1093 1093 meta = {
1094 1094 license = [ pkgs.lib.licenses.asl20 ];
1095 1095 };
1096 1096 };
1097 1097 nbconvert = super.buildPythonPackage {
1098 1098 name = "nbconvert-5.1.1";
1099 1099 buildInputs = with self; [];
1100 1100 doCheck = false;
1101 1101 propagatedBuildInputs = with self; [mistune Jinja2 Pygments traitlets jupyter-core nbformat entrypoints bleach pandocfilters testpath];
1102 1102 src = fetchurl {
1103 1103 url = "https://pypi.python.org/packages/95/58/df1c91f1658ee5df19097f915a1e71c91fc824a708d82d2b2e35f8b80e9a/nbconvert-5.1.1.tar.gz";
1104 1104 md5 = "d0263fb03a44db2f94eea09a608ed813";
1105 1105 };
1106 1106 meta = {
1107 1107 license = [ pkgs.lib.licenses.bsdOriginal ];
1108 1108 };
1109 1109 };
1110 1110 nbformat = super.buildPythonPackage {
1111 1111 name = "nbformat-4.3.0";
1112 1112 buildInputs = with self; [];
1113 1113 doCheck = false;
1114 1114 propagatedBuildInputs = with self; [ipython-genutils traitlets jsonschema jupyter-core];
1115 1115 src = fetchurl {
1116 1116 url = "https://pypi.python.org/packages/f9/c5/89df4abf906f766727f976e170caa85b4f1c1d1feb1f45d716016e68e19f/nbformat-4.3.0.tar.gz";
1117 1117 md5 = "9a00d20425914cd5ba5f97769d9963ca";
1118 1118 };
1119 1119 meta = {
1120 1120 license = [ pkgs.lib.licenses.bsdOriginal ];
1121 1121 };
1122 1122 };
1123 1123 nose = super.buildPythonPackage {
1124 1124 name = "nose-1.3.6";
1125 1125 buildInputs = with self; [];
1126 1126 doCheck = false;
1127 1127 propagatedBuildInputs = with self; [];
1128 1128 src = fetchurl {
1129 1129 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
1130 1130 md5 = "0ca546d81ca8309080fc80cb389e7a16";
1131 1131 };
1132 1132 meta = {
1133 1133 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
1134 1134 };
1135 1135 };
1136 1136 objgraph = super.buildPythonPackage {
1137 1137 name = "objgraph-3.1.0";
1138 1138 buildInputs = with self; [];
1139 1139 doCheck = false;
1140 1140 propagatedBuildInputs = with self; [graphviz];
1141 1141 src = fetchurl {
1142 1142 url = "https://pypi.python.org/packages/f4/b3/082e54e62094cb2ec84f8d5a49e0142cef99016491cecba83309cff920ae/objgraph-3.1.0.tar.gz";
1143 1143 md5 = "eddbd96039796bfbd13eee403701e64a";
1144 1144 };
1145 1145 meta = {
1146 1146 license = [ pkgs.lib.licenses.mit ];
1147 1147 };
1148 1148 };
1149 1149 packaging = super.buildPythonPackage {
1150 1150 name = "packaging-15.2";
1151 1151 buildInputs = with self; [];
1152 1152 doCheck = false;
1153 1153 propagatedBuildInputs = with self; [];
1154 1154 src = fetchurl {
1155 1155 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
1156 1156 md5 = "c16093476f6ced42128bf610e5db3784";
1157 1157 };
1158 1158 meta = {
1159 1159 license = [ pkgs.lib.licenses.asl20 ];
1160 1160 };
1161 1161 };
1162 1162 pandocfilters = super.buildPythonPackage {
1163 1163 name = "pandocfilters-1.4.1";
1164 1164 buildInputs = with self; [];
1165 1165 doCheck = false;
1166 1166 propagatedBuildInputs = with self; [];
1167 1167 src = fetchurl {
1168 1168 url = "https://pypi.python.org/packages/e3/1f/21d1b7e8ca571e80b796c758d361fdf5554335ff138158654684bc5401d8/pandocfilters-1.4.1.tar.gz";
1169 1169 md5 = "7680d9f9ec07397dd17f380ee3818b9d";
1170 1170 };
1171 1171 meta = {
1172 1172 license = [ pkgs.lib.licenses.bsdOriginal ];
1173 1173 };
1174 1174 };
1175 1175 paramiko = super.buildPythonPackage {
1176 1176 name = "paramiko-1.15.1";
1177 1177 buildInputs = with self; [];
1178 1178 doCheck = false;
1179 1179 propagatedBuildInputs = with self; [pycrypto ecdsa];
1180 1180 src = fetchurl {
1181 1181 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
1182 1182 md5 = "48c274c3f9b1282932567b21f6acf3b5";
1183 1183 };
1184 1184 meta = {
1185 1185 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1186 1186 };
1187 1187 };
1188 1188 pathlib2 = super.buildPythonPackage {
1189 1189 name = "pathlib2-2.3.0";
1190 1190 buildInputs = with self; [];
1191 1191 doCheck = false;
1192 1192 propagatedBuildInputs = with self; [six scandir];
1193 1193 src = fetchurl {
1194 1194 url = "https://pypi.python.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
1195 1195 md5 = "89c90409d11fd5947966b6a30a47d18c";
1196 1196 };
1197 1197 meta = {
1198 1198 license = [ pkgs.lib.licenses.mit ];
1199 1199 };
1200 1200 };
1201 1201 peppercorn = super.buildPythonPackage {
1202 1202 name = "peppercorn-0.5";
1203 1203 buildInputs = with self; [];
1204 1204 doCheck = false;
1205 1205 propagatedBuildInputs = with self; [];
1206 1206 src = fetchurl {
1207 1207 url = "https://pypi.python.org/packages/45/ec/a62ec317d1324a01567c5221b420742f094f05ee48097e5157d32be3755c/peppercorn-0.5.tar.gz";
1208 1208 md5 = "f08efbca5790019ab45d76b7244abd40";
1209 1209 };
1210 1210 meta = {
1211 1211 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1212 1212 };
1213 1213 };
1214 1214 pexpect = super.buildPythonPackage {
1215 1215 name = "pexpect-4.2.1";
1216 1216 buildInputs = with self; [];
1217 1217 doCheck = false;
1218 1218 propagatedBuildInputs = with self; [ptyprocess];
1219 1219 src = fetchurl {
1220 1220 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
1221 1221 md5 = "3694410001a99dff83f0b500a1ca1c95";
1222 1222 };
1223 1223 meta = {
1224 1224 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1225 1225 };
1226 1226 };
1227 1227 pickleshare = super.buildPythonPackage {
1228 1228 name = "pickleshare-0.7.4";
1229 1229 buildInputs = with self; [];
1230 1230 doCheck = false;
1231 1231 propagatedBuildInputs = with self; [pathlib2];
1232 1232 src = fetchurl {
1233 1233 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
1234 1234 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
1235 1235 };
1236 1236 meta = {
1237 1237 license = [ pkgs.lib.licenses.mit ];
1238 1238 };
1239 1239 };
1240 1240 plaster = super.buildPythonPackage {
1241 1241 name = "plaster-0.5";
1242 1242 buildInputs = with self; [];
1243 1243 doCheck = false;
1244 1244 propagatedBuildInputs = with self; [setuptools];
1245 1245 src = fetchurl {
1246 1246 url = "https://pypi.python.org/packages/99/b3/d7ca1fe31d2b56dba68a238721fda6820770f9c2a3de17a582d4b5b2edcc/plaster-0.5.tar.gz";
1247 1247 md5 = "c59345a67a860cfcaa1bd6a81451399d";
1248 1248 };
1249 1249 meta = {
1250 1250 license = [ pkgs.lib.licenses.mit ];
1251 1251 };
1252 1252 };
1253 1253 plaster-pastedeploy = super.buildPythonPackage {
1254 1254 name = "plaster-pastedeploy-0.4.1";
1255 1255 buildInputs = with self; [];
1256 1256 doCheck = false;
1257 1257 propagatedBuildInputs = with self; [PasteDeploy plaster];
1258 1258 src = fetchurl {
1259 1259 url = "https://pypi.python.org/packages/9d/6e/f8be01ed41c94e6c54ac97cf2eb142a702aae0c8cce31c846f785e525b40/plaster_pastedeploy-0.4.1.tar.gz";
1260 1260 md5 = "f48d5344b922e56c4978eebf1cd2e0d3";
1261 1261 };
1262 1262 meta = {
1263 1263 license = [ pkgs.lib.licenses.mit ];
1264 1264 };
1265 1265 };
1266 1266 prompt-toolkit = super.buildPythonPackage {
1267 1267 name = "prompt-toolkit-1.0.14";
1268 1268 buildInputs = with self; [];
1269 1269 doCheck = false;
1270 1270 propagatedBuildInputs = with self; [six wcwidth];
1271 1271 src = fetchurl {
1272 1272 url = "https://pypi.python.org/packages/55/56/8c39509b614bda53e638b7500f12577d663ac1b868aef53426fc6a26c3f5/prompt_toolkit-1.0.14.tar.gz";
1273 1273 md5 = "f24061ae133ed32c6b764e92bd48c496";
1274 1274 };
1275 1275 meta = {
1276 1276 license = [ pkgs.lib.licenses.bsdOriginal ];
1277 1277 };
1278 1278 };
1279 1279 psutil = super.buildPythonPackage {
1280 1280 name = "psutil-4.3.1";
1281 1281 buildInputs = with self; [];
1282 1282 doCheck = false;
1283 1283 propagatedBuildInputs = with self; [];
1284 1284 src = fetchurl {
1285 1285 url = "https://pypi.python.org/packages/78/cc/f267a1371f229bf16db6a4e604428c3b032b823b83155bd33cef45e49a53/psutil-4.3.1.tar.gz";
1286 1286 md5 = "199a366dba829c88bddaf5b41d19ddc0";
1287 1287 };
1288 1288 meta = {
1289 1289 license = [ pkgs.lib.licenses.bsdOriginal ];
1290 1290 };
1291 1291 };
1292 1292 psycopg2 = super.buildPythonPackage {
1293 1293 name = "psycopg2-2.7.1";
1294 1294 buildInputs = with self; [];
1295 1295 doCheck = false;
1296 1296 propagatedBuildInputs = with self; [];
1297 1297 src = fetchurl {
1298 1298 url = "https://pypi.python.org/packages/f8/e9/5793369ce8a41bf5467623ded8d59a434dfef9c136351aca4e70c2657ba0/psycopg2-2.7.1.tar.gz";
1299 1299 md5 = "67848ac33af88336046802f6ef7081f3";
1300 1300 };
1301 1301 meta = {
1302 1302 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1303 1303 };
1304 1304 };
1305 1305 ptyprocess = super.buildPythonPackage {
1306 1306 name = "ptyprocess-0.5.2";
1307 1307 buildInputs = with self; [];
1308 1308 doCheck = false;
1309 1309 propagatedBuildInputs = with self; [];
1310 1310 src = fetchurl {
1311 1311 url = "https://pypi.python.org/packages/51/83/5d07dc35534640b06f9d9f1a1d2bc2513fb9cc7595a1b0e28ae5477056ce/ptyprocess-0.5.2.tar.gz";
1312 1312 md5 = "d3b8febae1b8c53b054bd818d0bb8665";
1313 1313 };
1314 1314 meta = {
1315 1315 license = [ ];
1316 1316 };
1317 1317 };
1318 1318 py = super.buildPythonPackage {
1319 1319 name = "py-1.4.34";
1320 1320 buildInputs = with self; [];
1321 1321 doCheck = false;
1322 1322 propagatedBuildInputs = with self; [];
1323 1323 src = fetchurl {
1324 1324 url = "https://pypi.python.org/packages/68/35/58572278f1c097b403879c1e9369069633d1cbad5239b9057944bb764782/py-1.4.34.tar.gz";
1325 1325 md5 = "d9c3d8f734b0819ff48e355d77bf1730";
1326 1326 };
1327 1327 meta = {
1328 1328 license = [ pkgs.lib.licenses.mit ];
1329 1329 };
1330 1330 };
1331 1331 py-bcrypt = super.buildPythonPackage {
1332 1332 name = "py-bcrypt-0.4";
1333 1333 buildInputs = with self; [];
1334 1334 doCheck = false;
1335 1335 propagatedBuildInputs = with self; [];
1336 1336 src = fetchurl {
1337 1337 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1338 1338 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
1339 1339 };
1340 1340 meta = {
1341 1341 license = [ pkgs.lib.licenses.bsdOriginal ];
1342 1342 };
1343 1343 };
1344 1344 py-gfm = super.buildPythonPackage {
1345 1345 name = "py-gfm-0.1.3";
1346 1346 buildInputs = with self; [];
1347 1347 doCheck = false;
1348 1348 propagatedBuildInputs = with self; [setuptools Markdown];
1349 1349 src = fetchurl {
1350 1350 url = "https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16";
1351 1351 md5 = "0d0d5385bfb629eea636a80b9c2bfd16";
1352 1352 };
1353 1353 meta = {
1354 1354 license = [ pkgs.lib.licenses.bsdOriginal ];
1355 1355 };
1356 1356 };
1357 1357 pycrypto = super.buildPythonPackage {
1358 1358 name = "pycrypto-2.6.1";
1359 1359 buildInputs = with self; [];
1360 1360 doCheck = false;
1361 1361 propagatedBuildInputs = with self; [];
1362 1362 src = fetchurl {
1363 1363 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1364 1364 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1365 1365 };
1366 1366 meta = {
1367 1367 license = [ pkgs.lib.licenses.publicDomain ];
1368 1368 };
1369 1369 };
1370 1370 pycurl = super.buildPythonPackage {
1371 1371 name = "pycurl-7.19.5";
1372 1372 buildInputs = with self; [];
1373 1373 doCheck = false;
1374 1374 propagatedBuildInputs = with self; [];
1375 1375 src = fetchurl {
1376 1376 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1377 1377 md5 = "47b4eac84118e2606658122104e62072";
1378 1378 };
1379 1379 meta = {
1380 1380 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1381 1381 };
1382 1382 };
1383 1383 pyflakes = super.buildPythonPackage {
1384 1384 name = "pyflakes-0.8.1";
1385 1385 buildInputs = with self; [];
1386 1386 doCheck = false;
1387 1387 propagatedBuildInputs = with self; [];
1388 1388 src = fetchurl {
1389 1389 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1390 1390 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1391 1391 };
1392 1392 meta = {
1393 1393 license = [ pkgs.lib.licenses.mit ];
1394 1394 };
1395 1395 };
1396 1396 pygments-markdown-lexer = super.buildPythonPackage {
1397 1397 name = "pygments-markdown-lexer-0.1.0.dev39";
1398 1398 buildInputs = with self; [];
1399 1399 doCheck = false;
1400 1400 propagatedBuildInputs = with self; [Pygments];
1401 1401 src = fetchurl {
1402 1402 url = "https://pypi.python.org/packages/c3/12/674cdee66635d638cedb2c5d9c85ce507b7b2f91bdba29e482f1b1160ff6/pygments-markdown-lexer-0.1.0.dev39.zip";
1403 1403 md5 = "6360fe0f6d1f896e35b7a0142ce6459c";
1404 1404 };
1405 1405 meta = {
1406 1406 license = [ pkgs.lib.licenses.asl20 ];
1407 1407 };
1408 1408 };
1409 1409 pyparsing = super.buildPythonPackage {
1410 1410 name = "pyparsing-1.5.7";
1411 1411 buildInputs = with self; [];
1412 1412 doCheck = false;
1413 1413 propagatedBuildInputs = with self; [];
1414 1414 src = fetchurl {
1415 1415 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1416 1416 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1417 1417 };
1418 1418 meta = {
1419 1419 license = [ pkgs.lib.licenses.mit ];
1420 1420 };
1421 1421 };
1422 1422 pyramid = super.buildPythonPackage {
1423 1423 name = "pyramid-1.9";
1424 1424 buildInputs = with self; [];
1425 1425 doCheck = false;
1426 1426 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy plaster plaster-pastedeploy hupper];
1427 1427 src = fetchurl {
1428 1428 url = "https://pypi.python.org/packages/b0/73/715321e129334f3e41430bede877620175a63ed075fd5d1fd2c25b7cb121/pyramid-1.9.tar.gz";
1429 1429 md5 = "aa6c7c568f83151af51eb053ac633bc4";
1430 1430 };
1431 1431 meta = {
1432 1432 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1433 1433 };
1434 1434 };
1435 1435 pyramid-beaker = super.buildPythonPackage {
1436 1436 name = "pyramid-beaker-0.8";
1437 1437 buildInputs = with self; [];
1438 1438 doCheck = false;
1439 1439 propagatedBuildInputs = with self; [pyramid Beaker];
1440 1440 src = fetchurl {
1441 1441 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1442 1442 md5 = "22f14be31b06549f80890e2c63a93834";
1443 1443 };
1444 1444 meta = {
1445 1445 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1446 1446 };
1447 1447 };
1448 1448 pyramid-debugtoolbar = super.buildPythonPackage {
1449 name = "pyramid-debugtoolbar-3.0.5";
1449 name = "pyramid-debugtoolbar-4.2.1";
1450 1450 buildInputs = with self; [];
1451 1451 doCheck = false;
1452 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1452 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments ipaddress];
1453 1453 src = fetchurl {
1454 url = "https://pypi.python.org/packages/64/0e/df00bfb55605900e7a2f7e4a18dd83575a6651688e297d5a0aa4c208fd7d/pyramid_debugtoolbar-3.0.5.tar.gz";
1455 md5 = "aebab8c3bfdc6f89e4d3adc1d126538e";
1454 url = "https://pypi.python.org/packages/db/26/94620b7752936e2cd74838263ff366db9b454f7394bfb62d1eb2f84b29c1/pyramid_debugtoolbar-4.2.1.tar.gz";
1455 md5 = "3dfaced2fab1644ff5284017be9d92b9";
1456 1456 };
1457 1457 meta = {
1458 1458 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1459 1459 };
1460 1460 };
1461 1461 pyramid-jinja2 = super.buildPythonPackage {
1462 1462 name = "pyramid-jinja2-2.5";
1463 1463 buildInputs = with self; [];
1464 1464 doCheck = false;
1465 1465 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1466 1466 src = fetchurl {
1467 1467 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1468 1468 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1469 1469 };
1470 1470 meta = {
1471 1471 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1472 1472 };
1473 1473 };
1474 1474 pyramid-mako = super.buildPythonPackage {
1475 1475 name = "pyramid-mako-1.0.2";
1476 1476 buildInputs = with self; [];
1477 1477 doCheck = false;
1478 1478 propagatedBuildInputs = with self; [pyramid Mako];
1479 1479 src = fetchurl {
1480 1480 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1481 1481 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1482 1482 };
1483 1483 meta = {
1484 1484 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1485 1485 };
1486 1486 };
1487 1487 pysqlite = super.buildPythonPackage {
1488 1488 name = "pysqlite-2.8.3";
1489 1489 buildInputs = with self; [];
1490 1490 doCheck = false;
1491 1491 propagatedBuildInputs = with self; [];
1492 1492 src = fetchurl {
1493 1493 url = "https://pypi.python.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1494 1494 md5 = "033f17b8644577715aee55e8832ac9fc";
1495 1495 };
1496 1496 meta = {
1497 1497 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1498 1498 };
1499 1499 };
1500 1500 pytest = super.buildPythonPackage {
1501 1501 name = "pytest-3.1.2";
1502 1502 buildInputs = with self; [];
1503 1503 doCheck = false;
1504 1504 propagatedBuildInputs = with self; [py setuptools];
1505 1505 src = fetchurl {
1506 1506 url = "https://pypi.python.org/packages/72/2b/2d3155e01f45a5a04427857352ee88220ee39550b2bc078f9db3190aea46/pytest-3.1.2.tar.gz";
1507 1507 md5 = "c4d179f89043cc925e1c169d03128e02";
1508 1508 };
1509 1509 meta = {
1510 1510 license = [ pkgs.lib.licenses.mit ];
1511 1511 };
1512 1512 };
1513 1513 pytest-catchlog = super.buildPythonPackage {
1514 1514 name = "pytest-catchlog-1.2.2";
1515 1515 buildInputs = with self; [];
1516 1516 doCheck = false;
1517 1517 propagatedBuildInputs = with self; [py pytest];
1518 1518 src = fetchurl {
1519 1519 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1520 1520 md5 = "09d890c54c7456c818102b7ff8c182c8";
1521 1521 };
1522 1522 meta = {
1523 1523 license = [ pkgs.lib.licenses.mit ];
1524 1524 };
1525 1525 };
1526 1526 pytest-cov = super.buildPythonPackage {
1527 1527 name = "pytest-cov-2.5.1";
1528 1528 buildInputs = with self; [];
1529 1529 doCheck = false;
1530 1530 propagatedBuildInputs = with self; [pytest coverage];
1531 1531 src = fetchurl {
1532 1532 url = "https://pypi.python.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
1533 1533 md5 = "5acf38d4909e19819eb5c1754fbfc0ac";
1534 1534 };
1535 1535 meta = {
1536 1536 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1537 1537 };
1538 1538 };
1539 1539 pytest-profiling = super.buildPythonPackage {
1540 1540 name = "pytest-profiling-1.2.6";
1541 1541 buildInputs = with self; [];
1542 1542 doCheck = false;
1543 1543 propagatedBuildInputs = with self; [six pytest gprof2dot];
1544 1544 src = fetchurl {
1545 1545 url = "https://pypi.python.org/packages/f9/0d/df67fb9ce16c2cef201693da956321b1bccfbf9a4ead39748b9f9d1d74cb/pytest-profiling-1.2.6.tar.gz";
1546 1546 md5 = "50eb4c66c3762a2f1a49669bedc0b894";
1547 1547 };
1548 1548 meta = {
1549 1549 license = [ pkgs.lib.licenses.mit ];
1550 1550 };
1551 1551 };
1552 1552 pytest-runner = super.buildPythonPackage {
1553 1553 name = "pytest-runner-2.11.1";
1554 1554 buildInputs = with self; [];
1555 1555 doCheck = false;
1556 1556 propagatedBuildInputs = with self; [];
1557 1557 src = fetchurl {
1558 1558 url = "https://pypi.python.org/packages/9e/4d/08889e5e27a9f5d6096b9ad257f4dea1faabb03c5ded8f665ead448f5d8a/pytest-runner-2.11.1.tar.gz";
1559 1559 md5 = "bdb73eb18eca2727944a2dcf963c5a81";
1560 1560 };
1561 1561 meta = {
1562 1562 license = [ pkgs.lib.licenses.mit ];
1563 1563 };
1564 1564 };
1565 1565 pytest-sugar = super.buildPythonPackage {
1566 1566 name = "pytest-sugar-0.8.0";
1567 1567 buildInputs = with self; [];
1568 1568 doCheck = false;
1569 1569 propagatedBuildInputs = with self; [pytest termcolor];
1570 1570 src = fetchurl {
1571 1571 url = "https://pypi.python.org/packages/a5/b0/b2773dee078f17773a5bf2dfad49b0be57b6354bbd84bbefe4313e509d87/pytest-sugar-0.8.0.tar.gz";
1572 1572 md5 = "8cafbdad648068e0e44b8fc5f9faae42";
1573 1573 };
1574 1574 meta = {
1575 1575 license = [ pkgs.lib.licenses.bsdOriginal ];
1576 1576 };
1577 1577 };
1578 1578 pytest-timeout = super.buildPythonPackage {
1579 1579 name = "pytest-timeout-1.2.0";
1580 1580 buildInputs = with self; [];
1581 1581 doCheck = false;
1582 1582 propagatedBuildInputs = with self; [pytest];
1583 1583 src = fetchurl {
1584 1584 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
1585 1585 md5 = "83607d91aa163562c7ee835da57d061d";
1586 1586 };
1587 1587 meta = {
1588 1588 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1589 1589 };
1590 1590 };
1591 1591 python-dateutil = super.buildPythonPackage {
1592 1592 name = "python-dateutil-2.1";
1593 1593 buildInputs = with self; [];
1594 1594 doCheck = false;
1595 1595 propagatedBuildInputs = with self; [six];
1596 1596 src = fetchurl {
1597 1597 url = "https://pypi.python.org/packages/65/52/9c18dac21f174ad31b65e22d24297864a954e6fe65876eba3f5773d2da43/python-dateutil-2.1.tar.gz";
1598 1598 md5 = "1534bb15cf311f07afaa3aacba1c028b";
1599 1599 };
1600 1600 meta = {
1601 1601 license = [ { fullName = "Simplified BSD"; } ];
1602 1602 };
1603 1603 };
1604 1604 python-editor = super.buildPythonPackage {
1605 1605 name = "python-editor-1.0.3";
1606 1606 buildInputs = with self; [];
1607 1607 doCheck = false;
1608 1608 propagatedBuildInputs = with self; [];
1609 1609 src = fetchurl {
1610 1610 url = "https://pypi.python.org/packages/65/1e/adf6e000ea5dc909aa420352d6ba37f16434c8a3c2fa030445411a1ed545/python-editor-1.0.3.tar.gz";
1611 1611 md5 = "0aca5f2ef176ce68e98a5b7e31372835";
1612 1612 };
1613 1613 meta = {
1614 1614 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1615 1615 };
1616 1616 };
1617 1617 python-ldap = super.buildPythonPackage {
1618 1618 name = "python-ldap-2.4.40";
1619 1619 buildInputs = with self; [];
1620 1620 doCheck = false;
1621 1621 propagatedBuildInputs = with self; [setuptools];
1622 1622 src = fetchurl {
1623 1623 url = "https://pypi.python.org/packages/4a/d8/7d70a7469058a3987d224061a81d778951ac2b48220bdcc511e4b1b37176/python-ldap-2.4.40.tar.gz";
1624 1624 md5 = "aea0233f7d39b0c7549fcd310deeb0e5";
1625 1625 };
1626 1626 meta = {
1627 1627 license = [ pkgs.lib.licenses.psfl ];
1628 1628 };
1629 1629 };
1630 1630 python-memcached = super.buildPythonPackage {
1631 1631 name = "python-memcached-1.58";
1632 1632 buildInputs = with self; [];
1633 1633 doCheck = false;
1634 1634 propagatedBuildInputs = with self; [six];
1635 1635 src = fetchurl {
1636 1636 url = "https://pypi.python.org/packages/f7/62/14b2448cfb04427366f24104c9da97cf8ea380d7258a3233f066a951a8d8/python-memcached-1.58.tar.gz";
1637 1637 md5 = "23b258105013d14d899828d334e6b044";
1638 1638 };
1639 1639 meta = {
1640 1640 license = [ pkgs.lib.licenses.psfl ];
1641 1641 };
1642 1642 };
1643 1643 python-pam = super.buildPythonPackage {
1644 1644 name = "python-pam-1.8.2";
1645 1645 buildInputs = with self; [];
1646 1646 doCheck = false;
1647 1647 propagatedBuildInputs = with self; [];
1648 1648 src = fetchurl {
1649 1649 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1650 1650 md5 = "db71b6b999246fb05d78ecfbe166629d";
1651 1651 };
1652 1652 meta = {
1653 1653 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1654 1654 };
1655 1655 };
1656 1656 pytz = super.buildPythonPackage {
1657 1657 name = "pytz-2015.4";
1658 1658 buildInputs = with self; [];
1659 1659 doCheck = false;
1660 1660 propagatedBuildInputs = with self; [];
1661 1661 src = fetchurl {
1662 1662 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1663 1663 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1664 1664 };
1665 1665 meta = {
1666 1666 license = [ pkgs.lib.licenses.mit ];
1667 1667 };
1668 1668 };
1669 1669 pyzmq = super.buildPythonPackage {
1670 1670 name = "pyzmq-14.6.0";
1671 1671 buildInputs = with self; [];
1672 1672 doCheck = false;
1673 1673 propagatedBuildInputs = with self; [];
1674 1674 src = fetchurl {
1675 1675 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1676 1676 md5 = "395b5de95a931afa5b14c9349a5b8024";
1677 1677 };
1678 1678 meta = {
1679 1679 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1680 1680 };
1681 1681 };
1682 1682 recaptcha-client = super.buildPythonPackage {
1683 1683 name = "recaptcha-client-1.0.6";
1684 1684 buildInputs = with self; [];
1685 1685 doCheck = false;
1686 1686 propagatedBuildInputs = with self; [];
1687 1687 src = fetchurl {
1688 1688 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1689 1689 md5 = "74228180f7e1fb76c4d7089160b0d919";
1690 1690 };
1691 1691 meta = {
1692 1692 license = [ { fullName = "MIT/X11"; } ];
1693 1693 };
1694 1694 };
1695 1695 repoze.lru = super.buildPythonPackage {
1696 1696 name = "repoze.lru-0.6";
1697 1697 buildInputs = with self; [];
1698 1698 doCheck = false;
1699 1699 propagatedBuildInputs = with self; [];
1700 1700 src = fetchurl {
1701 1701 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1702 1702 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1703 1703 };
1704 1704 meta = {
1705 1705 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1706 1706 };
1707 1707 };
1708 1708 requests = super.buildPythonPackage {
1709 1709 name = "requests-2.9.1";
1710 1710 buildInputs = with self; [];
1711 1711 doCheck = false;
1712 1712 propagatedBuildInputs = with self; [];
1713 1713 src = fetchurl {
1714 1714 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1715 1715 md5 = "0b7f480d19012ec52bab78292efd976d";
1716 1716 };
1717 1717 meta = {
1718 1718 license = [ pkgs.lib.licenses.asl20 ];
1719 1719 };
1720 1720 };
1721 1721 rhodecode-enterprise-ce = super.buildPythonPackage {
1722 1722 name = "rhodecode-enterprise-ce-4.9.0";
1723 1723 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
1724 1724 doCheck = true;
1725 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments pygments-markdown-lexer Pylons Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress cssselect celery channelstream colander decorator deform docutils gevent gunicorn infrae.cache ipython iso8601 kombu lxml msgpack-python nbconvert packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson subprocess32 waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1725 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments pygments-markdown-lexer Pylons Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic cssselect celery channelstream colander decorator deform docutils gevent gunicorn infrae.cache ipython iso8601 kombu lxml msgpack-python nbconvert packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson subprocess32 waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1726 1726 src = ./.;
1727 1727 meta = {
1728 1728 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
1729 1729 };
1730 1730 };
1731 1731 rhodecode-tools = super.buildPythonPackage {
1732 1732 name = "rhodecode-tools-0.12.0";
1733 1733 buildInputs = with self; [];
1734 1734 doCheck = false;
1735 1735 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests elasticsearch elasticsearch-dsl urllib3 Whoosh];
1736 1736 src = fetchurl {
1737 1737 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.12.0.tar.gz?md5=9ca040356fa7e38d3f64529a4cffdca4";
1738 1738 md5 = "9ca040356fa7e38d3f64529a4cffdca4";
1739 1739 };
1740 1740 meta = {
1741 1741 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1742 1742 };
1743 1743 };
1744 1744 scandir = super.buildPythonPackage {
1745 1745 name = "scandir-1.5";
1746 1746 buildInputs = with self; [];
1747 1747 doCheck = false;
1748 1748 propagatedBuildInputs = with self; [];
1749 1749 src = fetchurl {
1750 1750 url = "https://pypi.python.org/packages/bd/f4/3143e0289faf0883228017dbc6387a66d0b468df646645e29e1eb89ea10e/scandir-1.5.tar.gz";
1751 1751 md5 = "a2713043de681bba6b084be42e7a8a44";
1752 1752 };
1753 1753 meta = {
1754 1754 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
1755 1755 };
1756 1756 };
1757 1757 setproctitle = super.buildPythonPackage {
1758 1758 name = "setproctitle-1.1.8";
1759 1759 buildInputs = with self; [];
1760 1760 doCheck = false;
1761 1761 propagatedBuildInputs = with self; [];
1762 1762 src = fetchurl {
1763 1763 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1764 1764 md5 = "728f4c8c6031bbe56083a48594027edd";
1765 1765 };
1766 1766 meta = {
1767 1767 license = [ pkgs.lib.licenses.bsdOriginal ];
1768 1768 };
1769 1769 };
1770 1770 setuptools = super.buildPythonPackage {
1771 1771 name = "setuptools-30.1.0";
1772 1772 buildInputs = with self; [];
1773 1773 doCheck = false;
1774 1774 propagatedBuildInputs = with self; [];
1775 1775 src = fetchurl {
1776 1776 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
1777 1777 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
1778 1778 };
1779 1779 meta = {
1780 1780 license = [ pkgs.lib.licenses.mit ];
1781 1781 };
1782 1782 };
1783 1783 setuptools-scm = super.buildPythonPackage {
1784 1784 name = "setuptools-scm-1.15.0";
1785 1785 buildInputs = with self; [];
1786 1786 doCheck = false;
1787 1787 propagatedBuildInputs = with self; [];
1788 1788 src = fetchurl {
1789 1789 url = "https://pypi.python.org/packages/80/b7/31b6ae5fcb188e37f7e31abe75f9be90490a5456a72860fa6e643f8a3cbc/setuptools_scm-1.15.0.tar.gz";
1790 1790 md5 = "b6916c78ed6253d6602444fad4279c5b";
1791 1791 };
1792 1792 meta = {
1793 1793 license = [ pkgs.lib.licenses.mit ];
1794 1794 };
1795 1795 };
1796 1796 simplegeneric = super.buildPythonPackage {
1797 1797 name = "simplegeneric-0.8.1";
1798 1798 buildInputs = with self; [];
1799 1799 doCheck = false;
1800 1800 propagatedBuildInputs = with self; [];
1801 1801 src = fetchurl {
1802 1802 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
1803 1803 md5 = "f9c1fab00fd981be588fc32759f474e3";
1804 1804 };
1805 1805 meta = {
1806 1806 license = [ pkgs.lib.licenses.zpt21 ];
1807 1807 };
1808 1808 };
1809 1809 simplejson = super.buildPythonPackage {
1810 1810 name = "simplejson-3.11.1";
1811 1811 buildInputs = with self; [];
1812 1812 doCheck = false;
1813 1813 propagatedBuildInputs = with self; [];
1814 1814 src = fetchurl {
1815 1815 url = "https://pypi.python.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
1816 1816 md5 = "6e2f1bd5fb0a926facf5d89d217a7183";
1817 1817 };
1818 1818 meta = {
1819 1819 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
1820 1820 };
1821 1821 };
1822 1822 six = super.buildPythonPackage {
1823 1823 name = "six-1.9.0";
1824 1824 buildInputs = with self; [];
1825 1825 doCheck = false;
1826 1826 propagatedBuildInputs = with self; [];
1827 1827 src = fetchurl {
1828 1828 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1829 1829 md5 = "476881ef4012262dfc8adc645ee786c4";
1830 1830 };
1831 1831 meta = {
1832 1832 license = [ pkgs.lib.licenses.mit ];
1833 1833 };
1834 1834 };
1835 1835 subprocess32 = super.buildPythonPackage {
1836 1836 name = "subprocess32-3.2.7";
1837 1837 buildInputs = with self; [];
1838 1838 doCheck = false;
1839 1839 propagatedBuildInputs = with self; [];
1840 1840 src = fetchurl {
1841 1841 url = "https://pypi.python.org/packages/b8/2f/49e53b0d0e94611a2dc624a1ad24d41b6d94d0f1b0a078443407ea2214c2/subprocess32-3.2.7.tar.gz";
1842 1842 md5 = "824c801e479d3e916879aae3e9c15e16";
1843 1843 };
1844 1844 meta = {
1845 1845 license = [ pkgs.lib.licenses.psfl ];
1846 1846 };
1847 1847 };
1848 1848 supervisor = super.buildPythonPackage {
1849 1849 name = "supervisor-3.3.2";
1850 1850 buildInputs = with self; [];
1851 1851 doCheck = false;
1852 1852 propagatedBuildInputs = with self; [meld3];
1853 1853 src = fetchurl {
1854 1854 url = "https://pypi.python.org/packages/7b/17/88adf8cb25f80e2bc0d18e094fcd7ab300632ea00b601cbbbb84c2419eae/supervisor-3.3.2.tar.gz";
1855 1855 md5 = "04766d62864da13d6a12f7429e75314f";
1856 1856 };
1857 1857 meta = {
1858 1858 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1859 1859 };
1860 1860 };
1861 1861 termcolor = super.buildPythonPackage {
1862 1862 name = "termcolor-1.1.0";
1863 1863 buildInputs = with self; [];
1864 1864 doCheck = false;
1865 1865 propagatedBuildInputs = with self; [];
1866 1866 src = fetchurl {
1867 1867 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
1868 1868 md5 = "043e89644f8909d462fbbfa511c768df";
1869 1869 };
1870 1870 meta = {
1871 1871 license = [ pkgs.lib.licenses.mit ];
1872 1872 };
1873 1873 };
1874 1874 testpath = super.buildPythonPackage {
1875 1875 name = "testpath-0.3.1";
1876 1876 buildInputs = with self; [];
1877 1877 doCheck = false;
1878 1878 propagatedBuildInputs = with self; [];
1879 1879 src = fetchurl {
1880 1880 url = "https://pypi.python.org/packages/f4/8b/b71e9ee10e5f751e9d959bc750ab122ba04187f5aa52aabdc4e63b0e31a7/testpath-0.3.1.tar.gz";
1881 1881 md5 = "2cd5ed5522fda781bb497c9d80ae2fc9";
1882 1882 };
1883 1883 meta = {
1884 1884 license = [ pkgs.lib.licenses.mit ];
1885 1885 };
1886 1886 };
1887 1887 traitlets = super.buildPythonPackage {
1888 1888 name = "traitlets-4.3.2";
1889 1889 buildInputs = with self; [];
1890 1890 doCheck = false;
1891 1891 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
1892 1892 src = fetchurl {
1893 1893 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
1894 1894 md5 = "3068663f2f38fd939a9eb3a500ccc154";
1895 1895 };
1896 1896 meta = {
1897 1897 license = [ pkgs.lib.licenses.bsdOriginal ];
1898 1898 };
1899 1899 };
1900 1900 transifex-client = super.buildPythonPackage {
1901 1901 name = "transifex-client-0.10";
1902 1902 buildInputs = with self; [];
1903 1903 doCheck = false;
1904 1904 propagatedBuildInputs = with self; [];
1905 1905 src = fetchurl {
1906 1906 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1907 1907 md5 = "5549538d84b8eede6b254cd81ae024fa";
1908 1908 };
1909 1909 meta = {
1910 1910 license = [ pkgs.lib.licenses.gpl2 ];
1911 1911 };
1912 1912 };
1913 1913 translationstring = super.buildPythonPackage {
1914 1914 name = "translationstring-1.3";
1915 1915 buildInputs = with self; [];
1916 1916 doCheck = false;
1917 1917 propagatedBuildInputs = with self; [];
1918 1918 src = fetchurl {
1919 1919 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1920 1920 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1921 1921 };
1922 1922 meta = {
1923 1923 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1924 1924 };
1925 1925 };
1926 1926 trollius = super.buildPythonPackage {
1927 1927 name = "trollius-1.0.4";
1928 1928 buildInputs = with self; [];
1929 1929 doCheck = false;
1930 1930 propagatedBuildInputs = with self; [futures];
1931 1931 src = fetchurl {
1932 1932 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1933 1933 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1934 1934 };
1935 1935 meta = {
1936 1936 license = [ pkgs.lib.licenses.asl20 ];
1937 1937 };
1938 1938 };
1939 1939 uWSGI = super.buildPythonPackage {
1940 1940 name = "uWSGI-2.0.15";
1941 1941 buildInputs = with self; [];
1942 1942 doCheck = false;
1943 1943 propagatedBuildInputs = with self; [];
1944 1944 src = fetchurl {
1945 1945 url = "https://pypi.python.org/packages/bb/0a/45e5aa80dc135889594bb371c082d20fb7ee7303b174874c996888cc8511/uwsgi-2.0.15.tar.gz";
1946 1946 md5 = "fc50bd9e83b7602fa474b032167010a7";
1947 1947 };
1948 1948 meta = {
1949 1949 license = [ pkgs.lib.licenses.gpl2 ];
1950 1950 };
1951 1951 };
1952 1952 urllib3 = super.buildPythonPackage {
1953 1953 name = "urllib3-1.16";
1954 1954 buildInputs = with self; [];
1955 1955 doCheck = false;
1956 1956 propagatedBuildInputs = with self; [];
1957 1957 src = fetchurl {
1958 1958 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1959 1959 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1960 1960 };
1961 1961 meta = {
1962 1962 license = [ pkgs.lib.licenses.mit ];
1963 1963 };
1964 1964 };
1965 1965 venusian = super.buildPythonPackage {
1966 1966 name = "venusian-1.1.0";
1967 1967 buildInputs = with self; [];
1968 1968 doCheck = false;
1969 1969 propagatedBuildInputs = with self; [];
1970 1970 src = fetchurl {
1971 1971 url = "https://pypi.python.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
1972 1972 md5 = "56bc5e6756e4bda37bcdb94f74a72b8f";
1973 1973 };
1974 1974 meta = {
1975 1975 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1976 1976 };
1977 1977 };
1978 1978 waitress = super.buildPythonPackage {
1979 1979 name = "waitress-1.0.2";
1980 1980 buildInputs = with self; [];
1981 1981 doCheck = false;
1982 1982 propagatedBuildInputs = with self; [];
1983 1983 src = fetchurl {
1984 1984 url = "https://pypi.python.org/packages/cd/f4/400d00863afa1e03618e31fd7e2092479a71b8c9718b00eb1eeb603746c6/waitress-1.0.2.tar.gz";
1985 1985 md5 = "b968f39e95d609f6194c6e50425d4bb7";
1986 1986 };
1987 1987 meta = {
1988 1988 license = [ pkgs.lib.licenses.zpt21 ];
1989 1989 };
1990 1990 };
1991 1991 wcwidth = super.buildPythonPackage {
1992 1992 name = "wcwidth-0.1.7";
1993 1993 buildInputs = with self; [];
1994 1994 doCheck = false;
1995 1995 propagatedBuildInputs = with self; [];
1996 1996 src = fetchurl {
1997 1997 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
1998 1998 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
1999 1999 };
2000 2000 meta = {
2001 2001 license = [ pkgs.lib.licenses.mit ];
2002 2002 };
2003 2003 };
2004 2004 ws4py = super.buildPythonPackage {
2005 2005 name = "ws4py-0.3.5";
2006 2006 buildInputs = with self; [];
2007 2007 doCheck = false;
2008 2008 propagatedBuildInputs = with self; [];
2009 2009 src = fetchurl {
2010 2010 url = "https://pypi.python.org/packages/b6/4f/34af703be86939629479e74d6e650e39f3bd73b3b09212c34e5125764cbc/ws4py-0.3.5.zip";
2011 2011 md5 = "a261b75c20b980e55ce7451a3576a867";
2012 2012 };
2013 2013 meta = {
2014 2014 license = [ pkgs.lib.licenses.bsdOriginal ];
2015 2015 };
2016 2016 };
2017 2017 wsgiref = super.buildPythonPackage {
2018 2018 name = "wsgiref-0.1.2";
2019 2019 buildInputs = with self; [];
2020 2020 doCheck = false;
2021 2021 propagatedBuildInputs = with self; [];
2022 2022 src = fetchurl {
2023 2023 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2024 2024 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
2025 2025 };
2026 2026 meta = {
2027 2027 license = [ { fullName = "PSF or ZPL"; } ];
2028 2028 };
2029 2029 };
2030 2030 zope.cachedescriptors = super.buildPythonPackage {
2031 2031 name = "zope.cachedescriptors-4.0.0";
2032 2032 buildInputs = with self; [];
2033 2033 doCheck = false;
2034 2034 propagatedBuildInputs = with self; [setuptools];
2035 2035 src = fetchurl {
2036 2036 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
2037 2037 md5 = "8d308de8c936792c8e758058fcb7d0f0";
2038 2038 };
2039 2039 meta = {
2040 2040 license = [ pkgs.lib.licenses.zpt21 ];
2041 2041 };
2042 2042 };
2043 2043 zope.deprecation = super.buildPythonPackage {
2044 2044 name = "zope.deprecation-4.1.2";
2045 2045 buildInputs = with self; [];
2046 2046 doCheck = false;
2047 2047 propagatedBuildInputs = with self; [setuptools];
2048 2048 src = fetchurl {
2049 2049 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
2050 2050 md5 = "e9a663ded58f4f9f7881beb56cae2782";
2051 2051 };
2052 2052 meta = {
2053 2053 license = [ pkgs.lib.licenses.zpt21 ];
2054 2054 };
2055 2055 };
2056 2056 zope.event = super.buildPythonPackage {
2057 2057 name = "zope.event-4.0.3";
2058 2058 buildInputs = with self; [];
2059 2059 doCheck = false;
2060 2060 propagatedBuildInputs = with self; [setuptools];
2061 2061 src = fetchurl {
2062 2062 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
2063 2063 md5 = "9a3780916332b18b8b85f522bcc3e249";
2064 2064 };
2065 2065 meta = {
2066 2066 license = [ pkgs.lib.licenses.zpt21 ];
2067 2067 };
2068 2068 };
2069 2069 zope.interface = super.buildPythonPackage {
2070 2070 name = "zope.interface-4.1.3";
2071 2071 buildInputs = with self; [];
2072 2072 doCheck = false;
2073 2073 propagatedBuildInputs = with self; [setuptools];
2074 2074 src = fetchurl {
2075 2075 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
2076 2076 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
2077 2077 };
2078 2078 meta = {
2079 2079 license = [ pkgs.lib.licenses.zpt21 ];
2080 2080 };
2081 2081 };
2082 2082
2083 2083 ### Test requirements
2084 2084
2085 2085
2086 2086 }
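The pyramid_debugtoolbar entry above is the substance of this change: the package is bumped from 3.0.5 to 4.2.1 and gains `ipaddress` as a propagated input. As a hedged illustration only (none of the code below ships with this repository), this is roughly how the toolbar is enabled in a Pyramid application for local development; the `debugtoolbar.hosts` setting is the part that newer toolbar releases appear to parse with the `ipaddress` package:

# illustrative sketch, not part of this repository
from pyramid.config import Configurator

def make_debug_app(global_config, **settings):
    # only requests coming from these addresses are shown the toolbar
    settings.setdefault('debugtoolbar.hosts', '127.0.0.1 ::1')
    config = Configurator(settings=settings)
    config.include('pyramid_debugtoolbar')
    return config.make_wsgi_app()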
@@ -1,136 +1,135 b''
1 1 ## core
2 2 setuptools==30.1.0
3 3 setuptools-scm==1.15.0
4 4
5 5 amqplib==1.0.2
6 6 anyjson==0.3.3
7 7 authomatic==0.1.0.post1
8 8 Babel==1.3
9 backport-ipaddress==0.1
10 9 Beaker==1.9.0
11 10 celery==2.2.10
12 11 Chameleon==2.24
13 12 channelstream==0.5.2
14 13 click==5.1
15 14 colander==1.3.3
16 15 configobj==5.0.6
17 16 cssselect==1.0.1
18 17 decorator==4.0.11
19 18 deform==2.0.4
20 19 docutils==0.13.1
21 20 dogpile.cache==0.6.4
22 21 dogpile.core==0.4.1
23 22 ecdsa==0.11
24 23 FormEncode==1.2.4
25 24 future==0.14.3
26 25 futures==3.0.2
27 26 gnureadline==6.3.3
28 27 infrae.cache==1.0.1
29 28 iso8601==0.1.11
30 29 itsdangerous==0.24
31 30 Jinja2==2.7.3
32 31 kombu==1.5.1
33 32 lxml==3.7.3
34 33 Mako==1.0.6
35 34 Markdown==2.6.8
36 35 MarkupSafe==0.23
37 36 meld3==1.0.2
38 37 msgpack-python==0.4.8
39 38 MySQL-python==1.2.5
40 39 nose==1.3.6
41 40 objgraph==3.1.0
42 41 packaging==15.2
43 42 paramiko==1.15.1
44 43 Paste==2.0.3
45 44 PasteDeploy==1.5.2
46 45 PasteScript==1.7.5
47 46 pathlib2==2.3.0
48 47 psutil==4.3.1
49 48 psycopg2==2.7.1
50 49 py-bcrypt==0.4
51 50 pycrypto==2.6.1
52 51 pycurl==7.19.5
53 52 pyflakes==0.8.1
54 53 pygments-markdown-lexer==0.1.0.dev39
55 54 Pygments==2.2.0
56 55 pyparsing==1.5.7
57 56 pyramid-beaker==0.8
58 pyramid-debugtoolbar==3.0.5
57 pyramid-debugtoolbar==4.2.1
59 58 pyramid-jinja2==2.5
60 59 pyramid-mako==1.0.2
61 60 pyramid==1.9.0
62 61 pysqlite==2.8.3
63 62 python-dateutil==2.1
64 63 python-ldap==2.4.40
65 64 python-memcached==1.58
66 65 python-pam==1.8.2
67 66 pytz==2015.4
68 67 pyzmq==14.6.0
69 68 recaptcha-client==1.0.6
70 69 repoze.lru==0.6
71 70 requests==2.9.1
72 71 Routes==1.13
73 72 setproctitle==1.1.8
74 73 simplejson==3.11.1
75 74 six==1.9.0
76 75 Sphinx==1.2.2
77 76 SQLAlchemy==0.9.9
78 77 subprocess32==3.2.7
79 78 supervisor==3.3.2
80 79 Tempita==0.5.2
81 80 translationstring==1.3
82 81 trollius==1.0.4
83 82 urllib3==1.16
84 83 URLObject==2.4.0
85 84 venusian==1.1.0
86 85 WebError==0.10.3
87 86 WebHelpers2==2.0
88 87 WebHelpers==1.3
89 88 WebOb==1.7.3
90 89 Whoosh==2.7.4
91 90 wsgiref==0.1.2
92 91 zope.cachedescriptors==4.0.0
93 92 zope.deprecation==4.1.2
94 93 zope.event==4.0.3
95 94 zope.interface==4.1.3
96 95
97 96 ## customized/patched libs
98 97 # our patched version of Pylons==1.0.2
99 98 https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f#egg=Pylons==1.0.2.rhodecode-patch-1
100 99 # not released py-gfm==0.1.3
101 100 https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16#egg=py-gfm==0.1.3.rhodecode-upstream1
102 101
103 102 # IPYTHON RENDERING
104 103 # entrypoints backport, pypi version doesn't support egg installs
105 104 https://code.rhodecode.com/upstream/entrypoints/archive/96e6d645684e1af3d7df5b5272f3fe85a546b233.tar.gz?md5=7db37771aea9ac9fefe093e5d6987313#egg=entrypoints==0.2.2.rhodecode-upstream1
106 105 nbconvert==5.1.1
107 106 nbformat==4.3.0
108 107 jupyter_client==5.0.0
109 108
110 109 ## cli tools
111 110 alembic==0.9.2
112 111 invoke==0.13.0
113 112 bumpversion==0.5.3
114 113 transifex-client==0.10
115 114
116 115 ## http servers
117 116 gevent==1.2.2
118 117 greenlet==0.4.12
119 118 gunicorn==19.7.1
120 119 waitress==1.0.2
121 120 uWSGI==2.0.15
122 121
123 122 ## debug
124 123 ipdb==0.10.3
125 124 ipython==5.1.0
126 125 CProfileV==1.0.7
127 126 bottle==0.12.8
128 127
129 128 ## rhodecode-tools, special case
130 129 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.12.0.tar.gz?md5=9ca040356fa7e38d3f64529a4cffdca4#egg=rhodecode-tools==0.12.0
131 130
132 131 ## appenlight
133 132 appenlight-client==0.6.21
134 133
135 134 ## test related requirements
136 135 -r requirements_test.txt
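As a quick post-install sanity check (a hypothetical snippet, not a script shipped with this repository), the bumped pin can be compared against what actually ends up in the environment:

# hypothetical check, not part of the repository
import pkg_resources

toolbar = pkg_resources.get_distribution('pyramid_debugtoolbar')
assert toolbar.version == '4.2.1', 'unexpected version: %s' % toolbar.version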
@@ -1,388 +1,385 b''
1 1 {
2 2 "libnghttp2-1.7.1": {
3 3 "MIT License": "http://spdx.org/licenses/MIT"
4 4 },
5 5 "nodejs-4.3.1": {
6 6 "MIT License": "http://spdx.org/licenses/MIT"
7 7 },
8 8 "python-2.7.12": {
9 9 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
10 10 },
11 11 "python2.7-Babel-1.3": {
12 12 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
13 13 },
14 14 "python2.7-Beaker-1.7.0": {
15 15 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
16 16 },
17 17 "python2.7-Chameleon-2.24": {
18 18 "BSD-like": "http://repoze.org/license.html"
19 19 },
20 20 "python2.7-FormEncode-1.2.4": {
21 21 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
22 22 },
23 23 "python2.7-Jinja2-2.7.3": {
24 24 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
25 25 },
26 26 "python2.7-Mako-1.0.6": {
27 27 "MIT License": "http://spdx.org/licenses/MIT"
28 28 },
29 29 "python2.7-Markdown-2.6.7": {
30 30 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
31 31 },
32 32 "python2.7-MarkupSafe-0.23": {
33 33 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
34 34 },
35 35 "python2.7-Paste-2.0.3": {
36 36 "MIT License": "http://spdx.org/licenses/MIT"
37 37 },
38 38 "python2.7-PasteDeploy-1.5.2": {
39 39 "MIT License": "http://spdx.org/licenses/MIT"
40 40 },
41 41 "python2.7-PasteScript-1.7.5": {
42 42 "MIT License": "http://spdx.org/licenses/MIT"
43 43 },
44 44 "python2.7-Pygments-2.2.0": {
45 45 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
46 46 },
47 47 "python2.7-Pylons-1.0.2.rhodecode-patch1": {
48 48 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
49 49 },
50 50 "python2.7-Routes-1.13": {
51 51 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
52 52 },
53 53 "python2.7-SQLAlchemy-0.9.9": {
54 54 "MIT License": "http://spdx.org/licenses/MIT"
55 55 },
56 56 "python2.7-Tempita-0.5.2": {
57 57 "MIT License": "http://spdx.org/licenses/MIT"
58 58 },
59 59 "python2.7-URLObject-2.4.0": {
60 60 "The Unlicense": "http://unlicense.org/"
61 61 },
62 62 "python2.7-WebError-0.10.3": {
63 63 "MIT License": "http://spdx.org/licenses/MIT"
64 64 },
65 65 "python2.7-WebHelpers-1.3": {
66 66 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
67 67 },
68 68 "python2.7-WebHelpers2-2.0": {
69 69 "MIT License": "http://spdx.org/licenses/MIT"
70 70 },
71 71 "python2.7-WebOb-1.3.1": {
72 72 "MIT License": "http://spdx.org/licenses/MIT"
73 73 },
74 74 "python2.7-Whoosh-2.7.4": {
75 75 "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause",
76 76 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
77 77 },
78 78 "python2.7-alembic-0.8.4": {
79 79 "MIT License": "http://spdx.org/licenses/MIT"
80 80 },
81 81 "python2.7-amqplib-1.0.2": {
82 82 "GNU Lesser General Public License v3.0 only": "http://spdx.org/licenses/LGPL-3.0"
83 83 },
84 84 "python2.7-anyjson-0.3.3": {
85 85 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
86 86 },
87 87 "python2.7-appenlight-client-0.6.14": {
88 88 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
89 89 },
90 90 "python2.7-authomatic-0.1.0.post1": {
91 91 "MIT License": "http://spdx.org/licenses/MIT"
92 },
93 "python2.7-backport-ipaddress-0.1": {
94 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
95 },
92 },
96 93 "python2.7-backports.shutil-get-terminal-size-1.0.0": {
97 94 "MIT License": "http://spdx.org/licenses/MIT"
98 95 },
99 96 "python2.7-bleach-1.5.0": {
100 97 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
101 98 },
102 99 "python2.7-celery-2.2.10": {
103 100 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
104 101 },
105 102 "python2.7-channelstream-0.5.2": {
106 103 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
107 104 },
108 105 "python2.7-click-5.1": {
109 106 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
110 107 },
111 108 "python2.7-colander-1.2": {
112 109 "Repoze License": "http://www.repoze.org/LICENSE.txt"
113 110 },
114 111 "python2.7-configobj-5.0.6": {
115 112 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
116 113 },
117 114 "python2.7-configparser-3.5.0": {
118 115 "MIT License": "http://spdx.org/licenses/MIT"
119 116 },
120 117 "python2.7-cssselect-1.0.1": {
121 118 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
122 119 },
123 120 "python2.7-decorator-4.0.11": {
124 121 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
125 122 },
126 123 "python2.7-deform-2.0a2": {
127 124 "BSD-derived": "http://www.repoze.org/LICENSE.txt"
128 125 },
129 126 "python2.7-docutils-0.12": {
130 127 "BSD 2-clause \"Simplified\" License": "http://spdx.org/licenses/BSD-2-Clause"
131 128 },
132 129 "python2.7-dogpile.cache-0.6.1": {
133 130 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
134 131 },
135 132 "python2.7-dogpile.core-0.4.1": {
136 133 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
137 134 },
138 135 "python2.7-elasticsearch-2.3.0": {
139 136 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
140 137 },
141 138 "python2.7-elasticsearch-dsl-2.2.0": {
142 139 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
143 140 },
144 141 "python2.7-entrypoints-0.2.2": {
145 142 "MIT License": "http://spdx.org/licenses/MIT"
146 143 },
147 144 "python2.7-enum34-1.1.6": {
148 145 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
149 146 },
150 147 "python2.7-functools32-3.2.3.post2": {
151 148 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
152 149 },
153 150 "python2.7-future-0.14.3": {
154 151 "MIT License": "http://spdx.org/licenses/MIT"
155 152 },
156 153 "python2.7-futures-3.0.2": {
157 154 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
158 155 },
159 156 "python2.7-gevent-1.1.2": {
160 157 "MIT License": "http://spdx.org/licenses/MIT"
161 158 },
162 159 "python2.7-gnureadline-6.3.3": {
163 160 "GNU General Public License v1.0 only": "http://spdx.org/licenses/GPL-1.0"
164 161 },
165 162 "python2.7-gprof2dot-2016.10.13": {
166 163 "GNU Lesser General Public License v3.0 or later": "http://spdx.org/licenses/LGPL-3.0+"
167 164 },
168 165 "python2.7-greenlet-0.4.10": {
169 166 "MIT License": "http://spdx.org/licenses/MIT"
170 167 },
171 168 "python2.7-gunicorn-19.6.0": {
172 169 "MIT License": "http://spdx.org/licenses/MIT"
173 170 },
174 171 "python2.7-html5lib-0.9999999": {
175 172 "MIT License": "http://spdx.org/licenses/MIT"
176 173 },
177 174 "python2.7-infrae.cache-1.0.1": {
178 175 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
179 176 },
180 177 "python2.7-ipython-5.1.0": {
181 178 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
182 179 },
183 180 "python2.7-ipython-genutils-0.2.0": {
184 181 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
185 182 },
186 183 "python2.7-iso8601-0.1.11": {
187 184 "MIT License": "http://spdx.org/licenses/MIT"
188 185 },
189 186 "python2.7-itsdangerous-0.24": {
190 187 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
191 188 },
192 189 "python2.7-jsonschema-2.6.0": {
193 190 "MIT License": "http://spdx.org/licenses/MIT"
194 191 },
195 192 "python2.7-jupyter-client-5.0.0": {
196 193 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
197 194 },
198 195 "python2.7-jupyter-core-4.3.0": {
199 196 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
200 197 },
201 198 "python2.7-kombu-1.5.1": {
202 199 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
203 200 },
204 201 "python2.7-mistune-0.7.4": {
205 202 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
206 203 },
207 204 "python2.7-msgpack-python-0.4.8": {
208 205 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
209 206 },
210 207 "python2.7-nbconvert-5.1.1": {
211 208 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
212 209 },
213 210 "python2.7-nbformat-4.3.0": {
214 211 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
215 212 },
216 213 "python2.7-packaging-15.2": {
217 214 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
218 215 },
219 216 "python2.7-pandocfilters-1.4.1": {
220 217 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
221 218 },
222 219 "python2.7-pathlib2-2.1.0": {
223 220 "MIT License": "http://spdx.org/licenses/MIT"
224 221 },
225 222 "python2.7-peppercorn-0.5": {
226 223 "BSD-derived": "http://www.repoze.org/LICENSE.txt"
227 224 },
228 225 "python2.7-pexpect-4.2.1": {
229 226 "ISC License": "http://spdx.org/licenses/ISC"
230 227 },
231 228 "python2.7-pickleshare-0.7.4": {
232 229 "MIT License": "http://spdx.org/licenses/MIT"
233 230 },
234 231 "python2.7-prompt-toolkit-1.0.14": {
235 232 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
236 233 },
237 234 "python2.7-psutil-4.3.1": {
238 235 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
239 236 },
240 237 "python2.7-psycopg2-2.6.1": {
241 238 "GNU Lesser General Public License v3.0 or later": "http://spdx.org/licenses/LGPL-3.0+"
242 239 },
243 240 "python2.7-ptyprocess-0.5.1": {
244 241 "ISC License": "http://opensource.org/licenses/ISC"
245 242 },
246 243 "python2.7-py-1.4.31": {
247 244 "MIT License": "http://spdx.org/licenses/MIT"
248 245 },
249 246 "python2.7-py-bcrypt-0.4": {
250 247 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
251 248 },
252 249 "python2.7-py-gfm-0.1.3.rhodecode-upstream1": {
253 250 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
254 251 },
255 252 "python2.7-pycrypto-2.6.1": {
256 253 "Public Domain": null
257 254 },
258 255 "python2.7-pycurl-7.19.5": {
259 256 "MIT License": "http://spdx.org/licenses/MIT"
260 257 },
261 258 "python2.7-pygments-markdown-lexer-0.1.0.dev39": {
262 259 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
263 260 },
264 261 "python2.7-pyparsing-1.5.7": {
265 262 "MIT License": "http://spdx.org/licenses/MIT"
266 263 },
267 264 "python2.7-pyramid-1.7.4": {
268 265 "Repoze License": "http://www.repoze.org/LICENSE.txt"
269 266 },
270 267 "python2.7-pyramid-beaker-0.8": {
271 268 "Repoze License": "http://www.repoze.org/LICENSE.txt"
272 269 },
273 270 "python2.7-pyramid-debugtoolbar-3.0.5": {
274 271 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause",
275 272 "Repoze License": "http://www.repoze.org/LICENSE.txt"
276 273 },
277 274 "python2.7-pyramid-jinja2-2.5": {
278 275 "BSD-derived": "http://www.repoze.org/LICENSE.txt"
279 276 },
280 277 "python2.7-pyramid-mako-1.0.2": {
281 278 "Repoze License": "http://www.repoze.org/LICENSE.txt"
282 279 },
283 280 "python2.7-pysqlite-2.6.3": {
284 281 "libpng License": "http://spdx.org/licenses/Libpng",
285 282 "zlib License": "http://spdx.org/licenses/Zlib"
286 283 },
287 284 "python2.7-pytest-3.0.5": {
288 285 "MIT License": "http://spdx.org/licenses/MIT"
289 286 },
290 287 "python2.7-pytest-profiling-1.2.2": {
291 288 "MIT License": "http://spdx.org/licenses/MIT"
292 289 },
293 290 "python2.7-pytest-runner-2.9": {
294 291 "MIT License": "http://spdx.org/licenses/MIT"
295 292 },
296 293 "python2.7-pytest-sugar-0.7.1": {
297 294 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
298 295 },
299 296 "python2.7-pytest-timeout-1.2.0": {
300 297 "MIT License": "http://spdx.org/licenses/MIT"
301 298 },
302 299 "python2.7-python-dateutil-2.1": {
303 300 "Simplified BSD": null
304 301 },
305 302 "python2.7-python-editor-1.0.3": {
306 303 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
307 304 },
308 305 "python2.7-python-ldap-2.4.19": {
309 306 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
310 307 },
311 308 "python2.7-python-memcached-1.57": {
312 309 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
313 310 },
314 311 "python2.7-pytz-2015.4": {
315 312 "MIT License": "http://spdx.org/licenses/MIT"
316 313 },
317 314 "python2.7-pyzmq-14.6.0": {
318 315 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
319 316 },
320 317 "python2.7-recaptcha-client-1.0.6": {
321 318 "MIT License": "http://spdx.org/licenses/MIT"
322 319 },
323 320 "python2.7-repoze.lru-0.6": {
324 321 "Repoze License": "http://www.repoze.org/LICENSE.txt"
325 322 },
326 323 "python2.7-requests-2.9.1": {
327 324 "Apache License 2.0": "http://spdx.org/licenses/Apache-2.0"
328 325 },
329 326 "python2.7-setuptools-19.4": {
330 327 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0",
331 328 "Zope Public License 2.0": "http://spdx.org/licenses/ZPL-2.0"
332 329 },
333 330 "python2.7-setuptools-scm-1.15.0": {
334 331 "MIT License": "http://spdx.org/licenses/MIT"
335 332 },
336 333 "python2.7-simplegeneric-0.8.1": {
337 334 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
338 335 },
339 336 "python2.7-simplejson-3.7.2": {
340 337 "MIT License": "http://spdx.org/licenses/MIT"
341 338 },
342 339 "python2.7-six-1.9.0": {
343 340 "MIT License": "http://spdx.org/licenses/MIT"
344 341 },
345 342 "python2.7-subprocess32-3.2.6": {
346 343 "Python Software Foundation License version 2": "http://spdx.org/licenses/Python-2.0"
347 344 },
348 345 "python2.7-termcolor-1.1.0": {
349 346 "MIT License": "http://spdx.org/licenses/MIT"
350 347 },
351 348 "python2.7-testpath-0.1": {
352 349 "MIT License": "http://spdx.org/licenses/MIT"
353 350 },
354 351 "python2.7-traitlets-4.3.2": {
355 352 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
356 353 },
357 354 "python2.7-translationstring-1.3": {
358 355 "Repoze License": "http://www.repoze.org/LICENSE.txt"
359 356 },
360 357 "python2.7-urllib3-1.16": {
361 358 "MIT License": "http://spdx.org/licenses/MIT"
362 359 },
363 360 "python2.7-venusian-1.0": {
364 361 "Repoze License": "http://www.repoze.org/LICENSE.txt"
365 362 },
366 363 "python2.7-waitress-1.0.1": {
367 364 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
368 365 },
369 366 "python2.7-wcwidth-0.1.7": {
370 367 "MIT License": "http://spdx.org/licenses/MIT"
371 368 },
372 369 "python2.7-ws4py-0.3.5": {
373 370 "BSD 4-clause \"Original\" or \"Old\" License": "http://spdx.org/licenses/BSD-4-Clause"
374 371 },
375 372 "python2.7-zope.cachedescriptors-4.0.0": {
376 373 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
377 374 },
378 375 "python2.7-zope.deprecation-4.1.2": {
379 376 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
380 377 },
381 378 "python2.7-zope.interface-4.1.3": {
382 379 "Zope Public License 2.1": "http://spdx.org/licenses/ZPL-2.1"
383 380 },
384 381 "xz-5.2.2": {
385 382 "GNU General Public License v2.0 or later": "http://spdx.org/licenses/GPL-2.0+",
386 383 "GNU Library General Public License v2.1 or later": "http://spdx.org/licenses/LGPL-2.1+"
387 384 }
388 385 }
\ No newline at end of file
@@ -1,2026 +1,2027 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 authentication and permission libraries
23 23 """
24 24
25 25 import os
26 26 import inspect
27 27 import collections
28 28 import fnmatch
29 29 import hashlib
30 30 import itertools
31 31 import logging
32 32 import random
33 33 import traceback
34 34 from functools import wraps
35 35
36 36 import ipaddress
37 37 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
38 38 from pylons.i18n.translation import _
39 39 # NOTE(marcink): this has to be removed only after pyramid migration,
40 40 # replace with _ = request.translate
41 41 from sqlalchemy.orm.exc import ObjectDeletedError
42 42 from sqlalchemy.orm import joinedload
43 43 from zope.cachedescriptors.property import Lazy as LazyProperty
44 44
45 45 import rhodecode
46 46 from rhodecode.model import meta
47 47 from rhodecode.model.meta import Session
48 48 from rhodecode.model.user import UserModel
49 49 from rhodecode.model.db import (
50 50 User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
51 51 UserIpMap, UserApiKeys, RepoGroup)
52 52 from rhodecode.lib import caches
53 53 from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5
54 54 from rhodecode.lib.utils import (
55 55 get_repo_slug, get_repo_group_slug, get_user_group_slug)
56 56 from rhodecode.lib.caching_query import FromCache
57 57
58 58
59 59 if rhodecode.is_unix:
60 60 import bcrypt
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64 csrf_token_key = "csrf_token"
65 65
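# Hedged illustration only -- not RhodeCode's actual IP-restriction logic.
# It shows the kind of allow-list check that the `ipaddress` import above
# (used together with models such as UserIpMap) makes possible; every name
# below except `ipaddress` and `safe_unicode` is invented for this sketch.
def _example_ip_allowed(source_ip, allowed_networks):
    # `allowed_networks` is an iterable of CIDR strings, e.g. [u'10.0.0.0/8']
    addr = ipaddress.ip_address(safe_unicode(source_ip))
    return any(
        addr in ipaddress.ip_network(safe_unicode(net), strict=False)
        for net in allowed_networks)
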
66 66
67 67 class PasswordGenerator(object):
68 68 """
69 69 This is a simple class for generating passwords from different sets of
70 70 characters
71 71 usage::
72 72
73 73 passwd_gen = PasswordGenerator()
74 74 # print an 8-letter password containing only big and small letters
75 75 # of the alphabet
76 76 passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
77 77 """
78 78 ALPHABETS_NUM = r'''1234567890'''
79 79 ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
80 80 ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
81 81 ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
82 82 ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
83 83 + ALPHABETS_NUM + ALPHABETS_SPECIAL
84 84 ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
85 85 ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
86 86 ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
87 87 ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
88 88
89 89 def __init__(self, passwd=''):
90 90 self.passwd = passwd
91 91
92 92 def gen_password(self, length, type_=None):
93 93 if type_ is None:
94 94 type_ = self.ALPHABETS_FULL
95 95 self.passwd = ''.join([random.choice(type_) for _ in xrange(length)])
96 96 return self.passwd
97 97
98 98
99 99 class _RhodeCodeCryptoBase(object):
100 100 ENC_PREF = None
101 101
102 102 def hash_create(self, str_):
103 103 """
104 104 hash the string using the scheme implemented by the subclass
105 105
106 106 :param str_: password to hash
107 107 """
108 108 raise NotImplementedError
109 109
110 110 def hash_check_with_upgrade(self, password, hashed):
111 111 """
112 112 Returns a tuple whose first element is a boolean stating whether the
113 113 given password matches its hashed version; the second is a new hash
114 114 of the password, in case this password should be migrated to a new
115 115 cipher.
116 116 """
117 117 checked_hash = self.hash_check(password, hashed)
118 118 return checked_hash, None
119 119
120 120 def hash_check(self, password, hashed):
121 121 """
122 122 Checks whether a password matches its hashed value.
123 123
124 124 :param password: password
125 125 :param hashed: password in hashed form
126 126 """
127 127 raise NotImplementedError
128 128
129 129 def _assert_bytes(self, value):
130 130 """
131 131 Passing in a `unicode` object can lead to hard-to-detect issues
132 132 if passwords contain non-ascii characters, so a type check is done
133 133 at runtime to catch such mistakes early on.
134 134 """
135 135 if not isinstance(value, str):
136 136 raise TypeError(
137 137 "Bytestring required as input, got %r." % (value, ))
138 138
139 139
140 140 class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
141 141 ENC_PREF = ('$2a$10', '$2b$10')
142 142
143 143 def hash_create(self, str_):
144 144 self._assert_bytes(str_)
145 145 return bcrypt.hashpw(str_, bcrypt.gensalt(10))
146 146
147 147 def hash_check_with_upgrade(self, password, hashed):
148 148 """
149 149         Returns a tuple whose first element is a boolean stating whether
150 150         the given password matches its hashed version; the second element
151 151         is a new hash of the password, in case the password should be
152 152         migrated to a new cipher.
153 153
154 154         This implements special upgrade logic which works like this:
155 155          - check the given password against the bcrypt hash; if it matches,
156 156            the correct password was used and it is already stored as bcrypt,
157 157            so proceed without any changes
158 158          - if the bcrypt check fails, try sha256. If that comparison
159 159            succeeds, the correct but old-style hashed password was used, so
160 160            indicate a hash change and proceed
161 161 """
162 162
163 163 new_hash = None
164 164
165 165 # regular pw check
166 166 password_match_bcrypt = self.hash_check(password, hashed)
167 167
168 168 # now we want to know if the password was maybe from sha256
169 169 # basically calling _RhodeCodeCryptoSha256().hash_check()
170 170 if not password_match_bcrypt:
171 171 if _RhodeCodeCryptoSha256().hash_check(password, hashed):
172 172 new_hash = self.hash_create(password) # make new bcrypt hash
173 173 password_match_bcrypt = True
174 174
175 175 return password_match_bcrypt, new_hash
176 176
177 177 def hash_check(self, password, hashed):
178 178 """
179 179         Checks whether the password matches its hashed value.
180 180
181 181 :param password: password
182 182 :param hashed: password in hashed form
183 183 """
184 184 self._assert_bytes(password)
185 185 try:
186 186 return bcrypt.hashpw(password, hashed) == hashed
187 187 except ValueError as e:
188 188             # we probably have an invalid salt here; we should not crash,
189 189             # just return False as if it were a wrong password.
190 190 log.debug('Failed to check password hash using bcrypt %s',
191 191 safe_str(e))
192 192
193 193 return False
194 194
195 195
196 196 class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
197 197 ENC_PREF = '_'
198 198
199 199 def hash_create(self, str_):
200 200 self._assert_bytes(str_)
201 201 return hashlib.sha256(str_).hexdigest()
202 202
203 203 def hash_check(self, password, hashed):
204 204 """
205 205         Checks whether the password matches its hashed value.
206 206
207 207 :param password: password
208 208 :param hashed: password in hashed form
209 209 """
210 210 self._assert_bytes(password)
211 211 return hashlib.sha256(password).hexdigest() == hashed
212 212
213 213
214 214 class _RhodeCodeCryptoMd5(_RhodeCodeCryptoBase):
215 215 ENC_PREF = '_'
216 216
217 217 def hash_create(self, str_):
218 218 self._assert_bytes(str_)
219 219 return hashlib.md5(str_).hexdigest()
220 220
221 221 def hash_check(self, password, hashed):
222 222 """
223 223         Checks whether the password matches its hashed value.
224 224
225 225 :param password: password
226 226 :param hashed: password in hashed form
227 227 """
228 228 self._assert_bytes(password)
229 229 return hashlib.md5(password).hexdigest() == hashed
230 230
231 231
232 232 def crypto_backend():
233 233 """
234 234 Return the matching crypto backend.
235 235
236 236     Selection depends on whether we are running tests; we pick the md5
237 237     backend to make tests run faster, since bcrypt is expensive to compute.
238 238 """
239 239 if rhodecode.is_test:
240 240 RhodeCodeCrypto = _RhodeCodeCryptoMd5()
241 241 else:
242 242 RhodeCodeCrypto = _RhodeCodeCryptoBCrypt()
243 243
244 244 return RhodeCodeCrypto
245 245
246 246
247 247 def get_crypt_password(password):
248 248 """
249 249 Create the hash of `password` with the active crypto backend.
250 250
251 251 :param password: The cleartext password.
252 252 :type password: unicode
253 253 """
254 254 password = safe_str(password)
255 255 return crypto_backend().hash_create(password)
256 256
257 257
258 258 def check_password(password, hashed):
259 259 """
260 260 Check if the value in `password` matches the hash in `hashed`.
261 261
262 262 :param password: The cleartext password.
263 263 :type password: unicode
264 264
265 265 :param hashed: The expected hashed version of the password.
266 266     :type hashed: The hash has to be passed in text representation.
267 267 """
268 268 password = safe_str(password)
269 269 return crypto_backend().hash_check(password, hashed)
270 270
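# Usage sketch for the helpers above -- a minimal flow, assuming the plaintext
# password arrives as `unicode` from a form. The hash is created by the active
# backend: md5 when running tests, bcrypt otherwise:
#
#     stored_hash = get_crypt_password(u'secret')
#     check_password(u'secret', stored_hash)     # True
#     check_password(u'wrong', stored_hash)      # False
#
# Migrating a legacy sha256 hash to bcrypt relies on the tuple returned by
# `hash_check_with_upgrade` of the bcrypt backend; when a new hash comes back
# the caller is expected to persist it on the user record:
#
#     valid, new_hash = crypto_backend().hash_check_with_upgrade(
#         safe_str(u'secret'), stored_hash)
#     if valid and new_hash:
#         pass  # save `new_hash` so the next login uses bcrypt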
271 271
272 272 def generate_auth_token(data, salt=None):
273 273 """
274 274     Generates an auth token (API key) from the given string and salt
275 275 """
276 276
277 277 if salt is None:
278 278 salt = os.urandom(16)
279 279 return hashlib.sha1(safe_str(data) + salt).hexdigest()
280 280
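# Behaviour sketch for the token generator: without an explicit salt a fresh
# `os.urandom` salt is mixed in, so every call yields a different token; a
# deterministic token requires passing the salt yourself. The salt value below
# is purely illustrative:
#
#     generate_auth_token('marcin', salt='fixed-salt') \
#         == generate_auth_token('marcin', salt='fixed-salt')        # True
#     generate_auth_token('marcin') == generate_auth_token('marcin')  # False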
281 281
282 282 class CookieStoreWrapper(object):
283 283
284 284 def __init__(self, cookie_store):
285 285 self.cookie_store = cookie_store
286 286
287 287 def __repr__(self):
288 288 return 'CookieStore<%s>' % (self.cookie_store)
289 289
290 290 def get(self, key, other=None):
291 291 if isinstance(self.cookie_store, dict):
292 292 return self.cookie_store.get(key, other)
293 293 elif isinstance(self.cookie_store, AuthUser):
294 294 return self.cookie_store.__dict__.get(key, other)
295 295
296 296
297 297 def _cached_perms_data(user_id, scope, user_is_admin,
298 298 user_inherit_default_permissions, explicit, algo):
299 299
300 300 permissions = PermissionCalculator(
301 301 user_id, scope, user_is_admin, user_inherit_default_permissions,
302 302 explicit, algo)
303 303 return permissions.calculate()
304 304
305 305
306 306 class PermOrigin(object):
307 307 ADMIN = 'superadmin'
308 308
309 309 REPO_USER = 'user:%s'
310 310 REPO_USERGROUP = 'usergroup:%s'
311 311 REPO_OWNER = 'repo.owner'
312 312 REPO_DEFAULT = 'repo.default'
313 313 REPO_PRIVATE = 'repo.private'
314 314
315 315 REPOGROUP_USER = 'user:%s'
316 316 REPOGROUP_USERGROUP = 'usergroup:%s'
317 317 REPOGROUP_OWNER = 'group.owner'
318 318 REPOGROUP_DEFAULT = 'group.default'
319 319
320 320 USERGROUP_USER = 'user:%s'
321 321 USERGROUP_USERGROUP = 'usergroup:%s'
322 322 USERGROUP_OWNER = 'usergroup.owner'
323 323 USERGROUP_DEFAULT = 'usergroup.default'
324 324
325 325
326 326 class PermOriginDict(dict):
327 327 """
328 328 A special dict used for tracking permissions along with their origins.
329 329
330 330 `__setitem__` has been overridden to expect a tuple(perm, origin)
331 331 `__getitem__` will return only the perm
332 332 `.perm_origin_stack` will return the stack of (perm, origin) set per key
333 333
334 334 >>> perms = PermOriginDict()
335 335 >>> perms['resource'] = 'read', 'default'
336 336 >>> perms['resource']
337 337 'read'
338 338 >>> perms['resource'] = 'write', 'admin'
339 339 >>> perms['resource']
340 340 'write'
341 341 >>> perms.perm_origin_stack
342 342 {'resource': [('read', 'default'), ('write', 'admin')]}
343 343 """
344 344
345 345 def __init__(self, *args, **kw):
346 346 dict.__init__(self, *args, **kw)
347 347 self.perm_origin_stack = {}
348 348
349 349 def __setitem__(self, key, (perm, origin)):
350 350 self.perm_origin_stack.setdefault(key, []).append((perm, origin))
351 351 dict.__setitem__(self, key, perm)
352 352
353 353
354 354 class PermissionCalculator(object):
355 355
356 356 def __init__(
357 357 self, user_id, scope, user_is_admin,
358 358 user_inherit_default_permissions, explicit, algo):
359 359 self.user_id = user_id
360 360 self.user_is_admin = user_is_admin
361 361 self.inherit_default_permissions = user_inherit_default_permissions
362 362 self.explicit = explicit
363 363 self.algo = algo
364 364
365 365 scope = scope or {}
366 366 self.scope_repo_id = scope.get('repo_id')
367 367 self.scope_repo_group_id = scope.get('repo_group_id')
368 368 self.scope_user_group_id = scope.get('user_group_id')
369 369
370 370 self.default_user_id = User.get_default_user(cache=True).user_id
371 371
372 372 self.permissions_repositories = PermOriginDict()
373 373 self.permissions_repository_groups = PermOriginDict()
374 374 self.permissions_user_groups = PermOriginDict()
375 375 self.permissions_global = set()
376 376
377 377 self.default_repo_perms = Permission.get_default_repo_perms(
378 378 self.default_user_id, self.scope_repo_id)
379 379 self.default_repo_groups_perms = Permission.get_default_group_perms(
380 380 self.default_user_id, self.scope_repo_group_id)
381 381 self.default_user_group_perms = \
382 382 Permission.get_default_user_group_perms(
383 383 self.default_user_id, self.scope_user_group_id)
384 384
385 385 def calculate(self):
386 386 if self.user_is_admin:
387 387 return self._admin_permissions()
388 388
389 389 self._calculate_global_default_permissions()
390 390 self._calculate_global_permissions()
391 391 self._calculate_default_permissions()
392 392 self._calculate_repository_permissions()
393 393 self._calculate_repository_group_permissions()
394 394 self._calculate_user_group_permissions()
395 395 return self._permission_structure()
396 396
397 397 def _admin_permissions(self):
398 398 """
399 399         An admin user has all default rights for repositories
400 400         and repository groups set to admin.
401 401 """
402 402 self.permissions_global.add('hg.admin')
403 403 self.permissions_global.add('hg.create.write_on_repogroup.true')
404 404
405 405 # repositories
406 406 for perm in self.default_repo_perms:
407 407 r_k = perm.UserRepoToPerm.repository.repo_name
408 408 p = 'repository.admin'
409 409 self.permissions_repositories[r_k] = p, PermOrigin.ADMIN
410 410
411 411 # repository groups
412 412 for perm in self.default_repo_groups_perms:
413 413 rg_k = perm.UserRepoGroupToPerm.group.group_name
414 414 p = 'group.admin'
415 415 self.permissions_repository_groups[rg_k] = p, PermOrigin.ADMIN
416 416
417 417 # user groups
418 418 for perm in self.default_user_group_perms:
419 419 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
420 420 p = 'usergroup.admin'
421 421 self.permissions_user_groups[u_k] = p, PermOrigin.ADMIN
422 422
423 423 return self._permission_structure()
424 424
425 425 def _calculate_global_default_permissions(self):
426 426 """
427 427 global permissions taken from the default user
428 428 """
429 429 default_global_perms = UserToPerm.query()\
430 430 .filter(UserToPerm.user_id == self.default_user_id)\
431 431 .options(joinedload(UserToPerm.permission))
432 432
433 433 for perm in default_global_perms:
434 434 self.permissions_global.add(perm.permission.permission_name)
435 435
436 436 def _calculate_global_permissions(self):
437 437 """
438 438 Set global system permissions with user permissions or permissions
439 439 taken from the user groups of the current user.
440 440
441 441 The permissions include repo creating, repo group creating, forking
442 442 etc.
443 443 """
444 444
445 445         # now we read the defined permissions and overwrite what we set
446 446         # before; those can be configured explicitly from groups or users.
447 447
448 448 # TODO: johbo: This seems to be out of sync, find out the reason
449 449 # for the comment below and update it.
450 450
451 451         # In case we want to extend this list we should always stay in sync
452 452         # with the User.DEFAULT_USER_PERMISSIONS definitions
453 453 _configurable = frozenset([
454 454 'hg.fork.none', 'hg.fork.repository',
455 455 'hg.create.none', 'hg.create.repository',
456 456 'hg.usergroup.create.false', 'hg.usergroup.create.true',
457 457 'hg.repogroup.create.false', 'hg.repogroup.create.true',
458 458 'hg.create.write_on_repogroup.false',
459 459 'hg.create.write_on_repogroup.true',
460 460 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
461 461 ])
462 462
463 463         # USER GROUPS come first: user group global permissions
464 464 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
465 465 .options(joinedload(UserGroupToPerm.permission))\
466 466 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
467 467 UserGroupMember.users_group_id))\
468 468 .filter(UserGroupMember.user_id == self.user_id)\
469 469 .order_by(UserGroupToPerm.users_group_id)\
470 470 .all()
471 471
472 472 # need to group here by groups since user can be in more than
473 473 # one group, so we get all groups
474 474 _explicit_grouped_perms = [
475 475 [x, list(y)] for x, y in
476 476 itertools.groupby(user_perms_from_users_groups,
477 477 lambda _x: _x.users_group)]
478 478
479 479 for gr, perms in _explicit_grouped_perms:
480 480 # since user can be in multiple groups iterate over them and
481 481 # select the lowest permissions first (more explicit)
482 482 # TODO: marcink: do this^^
483 483
484 484 # group doesn't inherit default permissions so we actually set them
485 485 if not gr.inherit_default_permissions:
486 486 # NEED TO IGNORE all previously set configurable permissions
487 487 # and replace them with explicitly set from this user
488 488 # group permissions
489 489 self.permissions_global = self.permissions_global.difference(
490 490 _configurable)
491 491 for perm in perms:
492 492 self.permissions_global.add(perm.permission.permission_name)
493 493
494 494 # user explicit global permissions
495 495 user_perms = Session().query(UserToPerm)\
496 496 .options(joinedload(UserToPerm.permission))\
497 497 .filter(UserToPerm.user_id == self.user_id).all()
498 498
499 499 if not self.inherit_default_permissions:
500 500 # NEED TO IGNORE all configurable permissions and
501 501 # replace them with explicitly set from this user permissions
502 502 self.permissions_global = self.permissions_global.difference(
503 503 _configurable)
504 504 for perm in user_perms:
505 505 self.permissions_global.add(perm.permission.permission_name)
506 506
507 507 def _calculate_default_permissions(self):
508 508 """
509 509         Set default user permissions for repositories and repository
510 510         groups, taken from the default user.
511 511
512 512 Calculate inheritance of object permissions based on what we have now
513 513 in GLOBAL permissions. We check if .false is in GLOBAL since this is
514 514 explicitly set. Inherit is the opposite of .false being there.
515 515
516 516 .. note::
517 517
518 518            the syntax is a little bit odd, but what we need to check here
519 519            is the absence of the .false permission from the list; even in
520 520            an inconsistent state where both .true and .false are present,
521 521            .false takes precedence
522 522
523 523 """
524 524 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
525 525 in self.permissions_global)
526 526
527 527 # defaults for repositories, taken from `default` user permissions
528 528 # on given repo
529 529 for perm in self.default_repo_perms:
530 530 r_k = perm.UserRepoToPerm.repository.repo_name
531 531 o = PermOrigin.REPO_DEFAULT
532 532 if perm.Repository.private and not (
533 533 perm.Repository.user_id == self.user_id):
534 534 # disable defaults for private repos,
535 535 p = 'repository.none'
536 536 o = PermOrigin.REPO_PRIVATE
537 537 elif perm.Repository.user_id == self.user_id:
538 538 # set admin if owner
539 539 p = 'repository.admin'
540 540 o = PermOrigin.REPO_OWNER
541 541 else:
542 542 p = perm.Permission.permission_name
543 543                 # if we decide this user isn't inheriting permissions from
544 544                 # the default user, we set the permission to .none so only
545 545                 # explicit permissions apply
546 546 if not user_inherit_object_permissions:
547 547 p = 'repository.none'
548 548 self.permissions_repositories[r_k] = p, o
549 549
550 550 # defaults for repository groups taken from `default` user permission
551 551 # on given group
552 552 for perm in self.default_repo_groups_perms:
553 553 rg_k = perm.UserRepoGroupToPerm.group.group_name
554 554 o = PermOrigin.REPOGROUP_DEFAULT
555 555 if perm.RepoGroup.user_id == self.user_id:
556 556 # set admin if owner
557 557 p = 'group.admin'
558 558 o = PermOrigin.REPOGROUP_OWNER
559 559 else:
560 560 p = perm.Permission.permission_name
561 561
562 562             # if we decide this user isn't inheriting permissions from the
563 563             # default user, we set it to .none so only explicit permissions apply
564 564 if not user_inherit_object_permissions:
565 565 p = 'group.none'
566 566 self.permissions_repository_groups[rg_k] = p, o
567 567
568 568 # defaults for user groups taken from `default` user permission
569 569 # on given user group
570 570 for perm in self.default_user_group_perms:
571 571 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
572 572 o = PermOrigin.USERGROUP_DEFAULT
573 573 if perm.UserGroup.user_id == self.user_id:
574 574 # set admin if owner
575 575 p = 'usergroup.admin'
576 576 o = PermOrigin.USERGROUP_OWNER
577 577 else:
578 578 p = perm.Permission.permission_name
579 579
580 580             # if we decide this user isn't inheriting permissions from the
581 581             # default user, we set it to .none so only explicit permissions apply
582 582 if not user_inherit_object_permissions:
583 583 p = 'usergroup.none'
584 584 self.permissions_user_groups[u_k] = p, o
585 585
586 586 def _calculate_repository_permissions(self):
587 587 """
588 588 Repository permissions for the current user.
589 589
590 590         Check if the user is part of user groups for this repository and
591 591         fill in the permissions from them. `_choose_permission` decides which
592 592         permission should be selected, based on the configured method.
593 593 """
594 594
595 595 # user group for repositories permissions
596 596 user_repo_perms_from_user_group = Permission\
597 597 .get_default_repo_perms_from_user_group(
598 598 self.user_id, self.scope_repo_id)
599 599
600 600 multiple_counter = collections.defaultdict(int)
601 601 for perm in user_repo_perms_from_user_group:
602 602 r_k = perm.UserGroupRepoToPerm.repository.repo_name
603 603 ug_k = perm.UserGroupRepoToPerm.users_group.users_group_name
604 604 multiple_counter[r_k] += 1
605 605 p = perm.Permission.permission_name
606 606 o = PermOrigin.REPO_USERGROUP % ug_k
607 607
608 608 if perm.Repository.user_id == self.user_id:
609 609 # set admin if owner
610 610 p = 'repository.admin'
611 611 o = PermOrigin.REPO_OWNER
612 612 else:
613 613 if multiple_counter[r_k] > 1:
614 614 cur_perm = self.permissions_repositories[r_k]
615 615 p = self._choose_permission(p, cur_perm)
616 616 self.permissions_repositories[r_k] = p, o
617 617
618 618         # user explicit permissions for repositories override any specified
619 619         # by the group permissions
620 620 user_repo_perms = Permission.get_default_repo_perms(
621 621 self.user_id, self.scope_repo_id)
622 622 for perm in user_repo_perms:
623 623 r_k = perm.UserRepoToPerm.repository.repo_name
624 624 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
625 625 # set admin if owner
626 626 if perm.Repository.user_id == self.user_id:
627 627 p = 'repository.admin'
628 628 o = PermOrigin.REPO_OWNER
629 629 else:
630 630 p = perm.Permission.permission_name
631 631 if not self.explicit:
632 632 cur_perm = self.permissions_repositories.get(
633 633 r_k, 'repository.none')
634 634 p = self._choose_permission(p, cur_perm)
635 635 self.permissions_repositories[r_k] = p, o
636 636
637 637 def _calculate_repository_group_permissions(self):
638 638 """
639 639 Repository group permissions for the current user.
640 640
641 641         Check if the user is part of user groups for repository groups and
642 642         fill in the permissions from them. `_choose_permission` decides which
643 643         permission should be selected, based on the configured method.
644 644 """
645 645 # user group for repo groups permissions
646 646 user_repo_group_perms_from_user_group = Permission\
647 647 .get_default_group_perms_from_user_group(
648 648 self.user_id, self.scope_repo_group_id)
649 649
650 650 multiple_counter = collections.defaultdict(int)
651 651 for perm in user_repo_group_perms_from_user_group:
652 652 g_k = perm.UserGroupRepoGroupToPerm.group.group_name
653 653 ug_k = perm.UserGroupRepoGroupToPerm.users_group.users_group_name
654 654 o = PermOrigin.REPOGROUP_USERGROUP % ug_k
655 655 multiple_counter[g_k] += 1
656 656 p = perm.Permission.permission_name
657 657 if perm.RepoGroup.user_id == self.user_id:
658 658 # set admin if owner, even for member of other user group
659 659 p = 'group.admin'
660 660 o = PermOrigin.REPOGROUP_OWNER
661 661 else:
662 662 if multiple_counter[g_k] > 1:
663 663 cur_perm = self.permissions_repository_groups[g_k]
664 664 p = self._choose_permission(p, cur_perm)
665 665 self.permissions_repository_groups[g_k] = p, o
666 666
667 667 # user explicit permissions for repository groups
668 668 user_repo_groups_perms = Permission.get_default_group_perms(
669 669 self.user_id, self.scope_repo_group_id)
670 670 for perm in user_repo_groups_perms:
671 671 rg_k = perm.UserRepoGroupToPerm.group.group_name
672 672 u_k = perm.UserRepoGroupToPerm.user.username
673 673 o = PermOrigin.REPOGROUP_USER % u_k
674 674
675 675 if perm.RepoGroup.user_id == self.user_id:
676 676 # set admin if owner
677 677 p = 'group.admin'
678 678 o = PermOrigin.REPOGROUP_OWNER
679 679 else:
680 680 p = perm.Permission.permission_name
681 681 if not self.explicit:
682 682 cur_perm = self.permissions_repository_groups.get(
683 683 rg_k, 'group.none')
684 684 p = self._choose_permission(p, cur_perm)
685 685 self.permissions_repository_groups[rg_k] = p, o
686 686
687 687 def _calculate_user_group_permissions(self):
688 688 """
689 689 User group permissions for the current user.
690 690 """
691 691 # user group for user group permissions
692 692 user_group_from_user_group = Permission\
693 693 .get_default_user_group_perms_from_user_group(
694 694 self.user_id, self.scope_user_group_id)
695 695
696 696 multiple_counter = collections.defaultdict(int)
697 697 for perm in user_group_from_user_group:
698 698 g_k = perm.UserGroupUserGroupToPerm\
699 699 .target_user_group.users_group_name
700 700 u_k = perm.UserGroupUserGroupToPerm\
701 701 .user_group.users_group_name
702 702 o = PermOrigin.USERGROUP_USERGROUP % u_k
703 703 multiple_counter[g_k] += 1
704 704 p = perm.Permission.permission_name
705 705
706 706 if perm.UserGroup.user_id == self.user_id:
707 707 # set admin if owner, even for member of other user group
708 708 p = 'usergroup.admin'
709 709 o = PermOrigin.USERGROUP_OWNER
710 710 else:
711 711 if multiple_counter[g_k] > 1:
712 712 cur_perm = self.permissions_user_groups[g_k]
713 713 p = self._choose_permission(p, cur_perm)
714 714 self.permissions_user_groups[g_k] = p, o
715 715
716 716 # user explicit permission for user groups
717 717 user_user_groups_perms = Permission.get_default_user_group_perms(
718 718 self.user_id, self.scope_user_group_id)
719 719 for perm in user_user_groups_perms:
720 720 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
721 721 u_k = perm.UserUserGroupToPerm.user.username
722 722 o = PermOrigin.USERGROUP_USER % u_k
723 723
724 724 if perm.UserGroup.user_id == self.user_id:
725 725 # set admin if owner
726 726 p = 'usergroup.admin'
727 727 o = PermOrigin.USERGROUP_OWNER
728 728 else:
729 729 p = perm.Permission.permission_name
730 730 if not self.explicit:
731 731 cur_perm = self.permissions_user_groups.get(
732 732 ug_k, 'usergroup.none')
733 733 p = self._choose_permission(p, cur_perm)
734 734 self.permissions_user_groups[ug_k] = p, o
735 735
736 736 def _choose_permission(self, new_perm, cur_perm):
737 737 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
738 738 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
739 739 if self.algo == 'higherwin':
740 740 if new_perm_val > cur_perm_val:
741 741 return new_perm
742 742 return cur_perm
743 743 elif self.algo == 'lowerwin':
744 744 if new_perm_val < cur_perm_val:
745 745 return new_perm
746 746 return cur_perm
747 747
748 748 def _permission_structure(self):
749 749 return {
750 750 'global': self.permissions_global,
751 751 'repositories': self.permissions_repositories,
752 752 'repositories_groups': self.permissions_repository_groups,
753 753 'user_groups': self.permissions_user_groups,
754 754 }
755 755
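# Rough sketch of how the calculator resolves conflicting permissions and what
# it returns; it assumes a configured database session and the values shown
# are illustrative. Weights come from `Permission.PERM_WEIGHTS`: 'higherwin'
# keeps the stronger permission, 'lowerwin' the weaker one:
#
#     calc = PermissionCalculator(
#         user_id, scope=None, user_is_admin=False,
#         user_inherit_default_permissions=True, explicit=False,
#         algo='higherwin')
#     calc._choose_permission('repository.write', 'repository.read')
#     # -> 'repository.write' under 'higherwin'
#
#     calc.calculate()
#     # -> {'global': set([...]),
#     #     'repositories': {'some-repo': 'repository.read', ...},
#     #     'repositories_groups': {...},
#     #     'user_groups': {...}}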
756 756
757 757 def allowed_auth_token_access(controller_name, whitelist=None, auth_token=None):
758 758 """
759 759     Check if the given controller_name is in the whitelist for auth token access
760 760 """
761 761 if not whitelist:
762 762 from rhodecode import CONFIG
763 763 whitelist = aslist(
764 764 CONFIG.get('api_access_controllers_whitelist'), sep=',')
765 765 log.debug(
766 766 'Allowed controllers for AUTH TOKEN access: %s' % (whitelist,))
767 767
768 768 auth_token_access_valid = False
769 769 for entry in whitelist:
770 770 if fnmatch.fnmatch(controller_name, entry):
771 771 auth_token_access_valid = True
772 772 break
773 773
774 774 if auth_token_access_valid:
775 775 log.debug('controller:%s matches entry in whitelist'
776 776 % (controller_name,))
777 777 else:
778 778 msg = ('controller: %s does *NOT* match any entry in whitelist'
779 779 % (controller_name,))
780 780 if auth_token:
781 781 # if we use auth token key and don't have access it's a warning
782 782 log.warning(msg)
783 783 else:
784 784 log.debug(msg)
785 785
786 786 return auth_token_access_valid
787 787
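# Whitelist matching sketch: entries of `api_access_controllers_whitelist` are
# fnmatch patterns checked against a "ControllerClass:method" location string,
# so wildcards can open up whole controllers. The patterns below are
# illustrative, not shipped defaults:
#
#     allowed_auth_token_access(
#         'ChangesetController:changeset_raw',
#         whitelist=['ChangesetController:changeset_raw'])    # True, exact
#     allowed_auth_token_access(
#         'ChangesetController:changeset_raw',
#         whitelist=['ChangesetController:*'])                # True, wildcard
#     allowed_auth_token_access(
#         'SummaryController:index',
#         whitelist=['ChangesetController:*'])                # False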
788 788
789 789 class AuthUser(object):
790 790 """
791 791     A simple object that handles all attributes of a user in RhodeCode.
792 792
793 793     It does a lookup based on API key, given user, or user present in
794 794     the session, then fills in all required information for that user.
795 795     If anonymous access is enabled, the default user is returned as logged in.
796 796 """
797 797 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
798 798
799 799 def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
800 800
801 801 self.user_id = user_id
802 802 self._api_key = api_key
803 803
804 804 self.api_key = None
805 805 self.feed_token = ''
806 806 self.username = username
807 807 self.ip_addr = ip_addr
808 808 self.name = ''
809 809 self.lastname = ''
810 810 self.first_name = ''
811 811 self.last_name = ''
812 812 self.email = ''
813 813 self.is_authenticated = False
814 814 self.admin = False
815 815 self.inherit_default_permissions = False
816 816 self.password = ''
817 817
818 818 self.anonymous_user = None # propagated on propagate_data
819 819 self.propagate_data()
820 820 self._instance = None
821 821 self._permissions_scoped_cache = {} # used to bind scoped calculation
822 822
823 823 @LazyProperty
824 824 def permissions(self):
825 825 return self.get_perms(user=self, cache=False)
826 826
827 827 def permissions_with_scope(self, scope):
828 828 """
829 829         Call the get_perms function with scoped data. The scope in that
830 830         function narrows the SQL calls to the given object IDs, so only the
831 831         particular permissions we want to obtain are fetched. If scope is an
832 832         empty dict, it basically narrows the scope to GLOBAL permissions only.
833 833
834 834 :param scope: dict
835 835 """
836 836 if 'repo_name' in scope:
837 837 obj = Repository.get_by_repo_name(scope['repo_name'])
838 838 if obj:
839 839 scope['repo_id'] = obj.repo_id
840 840 _scope = {
841 841 'repo_id': -1,
842 842 'user_group_id': -1,
843 843 'repo_group_id': -1,
844 844 }
845 845 _scope.update(scope)
846 846 cache_key = "_".join(map(safe_str, reduce(lambda a, b: a+b,
847 847 _scope.items())))
848 848 if cache_key not in self._permissions_scoped_cache:
849 849 # store in cache to mimic how the @LazyProperty works,
850 850 # the difference here is that we use the unique key calculated
851 851 # from params and values
852 852 res = self.get_perms(user=self, cache=False, scope=_scope)
853 853 self._permissions_scoped_cache[cache_key] = res
854 854 return self._permissions_scoped_cache[cache_key]
855 855
856 856 def get_instance(self):
857 857 return User.get(self.user_id)
858 858
859 859 def update_lastactivity(self):
860 860 if self.user_id:
861 861 User.get(self.user_id).update_lastactivity()
862 862
863 863 def propagate_data(self):
864 864 """
865 865 Fills in user data and propagates values to this instance. Maps fetched
866 866 user attributes to this class instance attributes
867 867 """
868 868 log.debug('starting data propagation for new potential AuthUser')
869 869 user_model = UserModel()
870 870 anon_user = self.anonymous_user = User.get_default_user(cache=True)
871 871 is_user_loaded = False
872 872
873 873 # lookup by userid
874 874 if self.user_id is not None and self.user_id != anon_user.user_id:
875 875 log.debug('Trying Auth User lookup by USER ID: `%s`' % self.user_id)
876 876 is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
877 877
878 878         # try to get user by api key
879 879 elif self._api_key and self._api_key != anon_user.api_key:
880 880 log.debug('Trying Auth User lookup by API KEY: `%s`' % self._api_key)
881 881 is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
882 882
883 883 # lookup by username
884 884 elif self.username:
885 885 log.debug('Trying Auth User lookup by USER NAME: `%s`' % self.username)
886 886 is_user_loaded = user_model.fill_data(self, username=self.username)
887 887 else:
888 888             log.debug('No data in %s that could have been used to log in' % self)
889 889
890 890 if not is_user_loaded:
891 891 log.debug('Failed to load user. Fallback to default user')
892 892 # if we cannot authenticate user try anonymous
893 893 if anon_user.active:
894 894 user_model.fill_data(self, user_id=anon_user.user_id)
895 895                 # then we mark this user as logged in
896 896 self.is_authenticated = True
897 897 else:
898 898                 # in case the anonymous user is disabled we reset some of the
899 899                 # parameters so such a user is "corrupted", skipping fill_data
900 900 for attr in ['user_id', 'username', 'admin', 'active']:
901 901 setattr(self, attr, None)
902 902 self.is_authenticated = False
903 903
904 904 if not self.username:
905 905 self.username = 'None'
906 906
907 907 log.debug('Auth User is now %s' % self)
908 908
909 909 def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
910 910 cache=False):
911 911 """
912 912         Fills the user permission attribute with permissions taken from the
913 913         database; works for permissions given for repositories and for
914 914         permissions granted to groups.
915 915
916 916         :param user: instance of User object from database
917 917         :param explicit: In case there are permissions both for a user and a
918 918             group the user is part of, the explicit flag defines whether the
919 919             user permissions explicitly override the group permissions; if it
920 920             is False, the decision is made based on the algo
921 921         :param algo: algorithm used to decide which permission should be
922 922             chosen if multiple are defined, e.g. user in two different groups.
923 923             It also decides, when the explicit flag is turned off, how to pick
924 924             the permission when the user is in a group and also has a separate permission
925 925 """
926 926 user_id = user.user_id
927 927 user_is_admin = user.is_admin
928 928
929 929 # inheritance of global permissions like create repo/fork repo etc
930 930 user_inherit_default_permissions = user.inherit_default_permissions
931 931
932 932 log.debug('Computing PERMISSION tree for scope %s' % (scope, ))
933 933 compute = caches.conditional_cache(
934 934 'short_term', 'cache_desc',
935 935 condition=cache, func=_cached_perms_data)
936 936 result = compute(user_id, scope, user_is_admin,
937 937 user_inherit_default_permissions, explicit, algo)
938 938
939 939 result_repr = []
940 940 for k in result:
941 941 result_repr.append((k, len(result[k])))
942 942
943 943 log.debug('PERMISSION tree computed %s' % (result_repr,))
944 944 return result
945 945
946 946 @property
947 947 def is_default(self):
948 948 return self.username == User.DEFAULT_USER
949 949
950 950 @property
951 951 def is_admin(self):
952 952 return self.admin
953 953
954 954 @property
955 955 def is_user_object(self):
956 956 return self.user_id is not None
957 957
958 958 @property
959 959 def repositories_admin(self):
960 960 """
961 961 Returns list of repositories you're an admin of
962 962 """
963 963 return [
964 964 x[0] for x in self.permissions['repositories'].iteritems()
965 965 if x[1] == 'repository.admin']
966 966
967 967 @property
968 968 def repository_groups_admin(self):
969 969 """
970 970 Returns list of repository groups you're an admin of
971 971 """
972 972 return [
973 973 x[0] for x in self.permissions['repositories_groups'].iteritems()
974 974 if x[1] == 'group.admin']
975 975
976 976 @property
977 977 def user_groups_admin(self):
978 978 """
979 979 Returns list of user groups you're an admin of
980 980 """
981 981 return [
982 982 x[0] for x in self.permissions['user_groups'].iteritems()
983 983 if x[1] == 'usergroup.admin']
984 984
985 985 @property
986 986 def ip_allowed(self):
987 987 """
988 988         Checks if the ip_addr used in the constructor is within the user's
989 989         defined list of allowed IP addresses
990 990
991 991         :returns: boolean, True if the IP is in the allowed IP range
992 992 """
993 993 # check IP
994 994 inherit = self.inherit_default_permissions
995 995 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
996 996                                           inherit_from_default=inherit)

997 997     @property
998 998 def personal_repo_group(self):
999 999 return RepoGroup.get_user_personal_repo_group(self.user_id)
1000 1000
1001 1001 @classmethod
1002 1002 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1003 1003 allowed_ips = AuthUser.get_allowed_ips(
1004 1004 user_id, cache=True, inherit_from_default=inherit_from_default)
1005 1005 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1006 1006 log.debug('IP:%s is in range of %s' % (ip_addr, allowed_ips))
1007 1007 return True
1008 1008 else:
1009 1009 log.info('Access for IP:%s forbidden, '
1010 1010 'not in %s' % (ip_addr, allowed_ips))
1011 1011 return False
1012 1012
1013 1013 def __repr__(self):
1014 1014 return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\
1015 1015 % (self.user_id, self.username, self.ip_addr, self.is_authenticated)
1016 1016
1017 1017 def set_authenticated(self, authenticated=True):
1018 1018 if self.user_id != self.anonymous_user.user_id:
1019 1019 self.is_authenticated = authenticated
1020 1020
1021 1021 def get_cookie_store(self):
1022 1022 return {
1023 1023 'username': self.username,
1024 1024 'password': md5(self.password),
1025 1025 'user_id': self.user_id,
1026 1026 'is_authenticated': self.is_authenticated
1027 1027 }
1028 1028
1029 1029 @classmethod
1030 1030 def from_cookie_store(cls, cookie_store):
1031 1031 """
1032 1032 Creates AuthUser from a cookie store
1033 1033
1034 1034 :param cls:
1035 1035 :param cookie_store:
1036 1036 """
1037 1037 user_id = cookie_store.get('user_id')
1038 1038 username = cookie_store.get('username')
1039 1039 api_key = cookie_store.get('api_key')
1040 1040 return AuthUser(user_id, api_key, username)
1041 1041
1042 1042 @classmethod
1043 1043 def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
1044 1044 _set = set()
1045 1045
1046 1046 if inherit_from_default:
1047 1047 default_ips = UserIpMap.query().filter(
1048 1048 UserIpMap.user == User.get_default_user(cache=True))
1049 1049 if cache:
1050 1050 default_ips = default_ips.options(
1051 1051 FromCache("sql_cache_short", "get_user_ips_default"))
1052 1052
1053 1053 # populate from default user
1054 1054 for ip in default_ips:
1055 1055 try:
1056 1056 _set.add(ip.ip_addr)
1057 1057 except ObjectDeletedError:
1058 1058 # since we use heavy caching sometimes it happens that
1059 1059 # we get deleted objects here, we just skip them
1060 1060 pass
1061 1061
1062 1062 user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
1063 1063 if cache:
1064 1064 user_ips = user_ips.options(
1065 1065 FromCache("sql_cache_short", "get_user_ips_%s" % user_id))
1066 1066
1067 1067 for ip in user_ips:
1068 1068 try:
1069 1069 _set.add(ip.ip_addr)
1070 1070 except ObjectDeletedError:
1071 1071 # since we use heavy caching sometimes it happens that we get
1072 1072 # deleted objects here, we just skip them
1073 1073 pass
1074 1074 return _set or set(['0.0.0.0/0', '::/0'])
1075 1075
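# Usage sketch for AuthUser, assuming a configured database session and an
# existing user id (`some_user_id` is a placeholder). Scoped lookups avoid
# computing the full permission tree when only one repository matters, and the
# IP check falls back to "allow everything" (0.0.0.0/0, ::/0) when no UserIpMap
# entries exist:
#
#     auth_user = AuthUser(user_id=some_user_id, ip_addr='10.0.1.7')
#     auth_user.permissions['repositories']             # full tree, cached
#     auth_user.permissions_with_scope(
#         {'repo_name': 'some-repo'})['repositories']   # narrowed SQL scope
#     auth_user.ip_allowed                              # bool for this request
#     AuthUser.get_allowed_ips(
#         some_user_id, cache=True, inherit_from_default=True)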
1076 1076
1077 1077 def set_available_permissions(config):
1078 1078 """
1079 1079     This function will populate the pylons globals with all available
1080 1080     permissions defined in the db. We don't want to check the db each time
1081 1081     for new permissions, since adding a new permission also requires an
1082 1082     application restart, i.e. to decorate new views with the new permission
1083 1083
1084 1084 :param config: current pylons config instance
1085 1085
1086 1086 """
1087 1087 log.info('getting information about all available permissions')
1088 1088 try:
1089 1089 sa = meta.Session
1090 1090 all_perms = sa.query(Permission).all()
1091 1091 config['available_permissions'] = [x.permission_name for x in all_perms]
1092 1092 except Exception:
1093 1093 log.error(traceback.format_exc())
1094 1094 finally:
1095 1095 meta.Session.remove()
1096 1096
1097 1097
1098 1098 def get_csrf_token(session=None, force_new=False, save_if_missing=True):
1099 1099 """
1100 1100     Return the current CSRF token, creating one if it doesn't already
1101 1101     exist and the save_if_missing flag is set.
1102 1102
1103 1103 :param session: pass in the pylons session, else we use the global ones
1104 1104 :param force_new: force to re-generate the token and store it in session
1105 1105 :param save_if_missing: save the newly generated token if it's missing in
1106 1106 session
1107 1107 """
1108 1108 # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
1109 1109 # from pyramid.csrf import get_csrf_token
1110 1110
1111 1111 if not session:
1112 1112 from pylons import session
1113 1113
1114 1114 if (csrf_token_key not in session and save_if_missing) or force_new:
1115 1115 token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
1116 1116 session[csrf_token_key] = token
1117 1117 if hasattr(session, 'save'):
1118 1118 session.save()
1119 1119 return session.get(csrf_token_key)
1120 1120
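# CSRF token sketch for pylons-style views: the token is generated once, kept
# in the session under `csrf_token_key` and embedded into forms so that
# `CSRFRequired` below can verify it on POST. The template line is illustrative:
#
#     token = get_csrf_token(session)
#     # <input type="hidden" name="csrf_token" value="${token}">
#     get_csrf_token(session, force_new=True)   # rotate the token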
1121 1121
1122 1122 def get_request(perm_class):
1123 1123 from pyramid.threadlocal import get_current_request
1124 1124 pyramid_request = get_current_request()
1125 1125 if not pyramid_request:
1126 1126 # return global request of pylons in case pyramid isn't available
1127 1127 # NOTE(marcink): this should be removed after migration to pyramid
1128 1128 from pylons import request
1129 1129 return request
1130 1130 return pyramid_request
1131 1131
1132 1132
1133 1133 # CHECK DECORATORS
1134 1134 class CSRFRequired(object):
1135 1135 """
1136 1136 Decorator for authenticating a form
1137 1137
1138 1138 This decorator uses an authorization token stored in the client's
1139 1139 session for prevention of certain Cross-site request forgery (CSRF)
1140 1140 attacks (See
1141 1141 http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
1142 1142 information).
1143 1143
1144 1144 For use with the ``webhelpers.secure_form`` helper functions.
1145 1145
1146 1146 """
1147 1147 def __init__(self, token=csrf_token_key, header='X-CSRF-Token',
1148 1148 except_methods=None):
1149 1149 self.token = token
1150 1150 self.header = header
1151 1151 self.except_methods = except_methods or []
1152 1152
1153 1153 def __call__(self, func):
1154 1154 return get_cython_compat_decorator(self.__wrapper, func)
1155 1155
1156 1156 def _get_csrf(self, _request):
1157 1157 return _request.POST.get(self.token, _request.headers.get(self.header))
1158 1158
1159 1159 def check_csrf(self, _request, cur_token):
1160 1160 supplied_token = self._get_csrf(_request)
1161 1161 return supplied_token and supplied_token == cur_token
1162 1162
1163 1163 def _get_request(self):
1164 1164 return get_request(self)
1165 1165
1166 1166 def __wrapper(self, func, *fargs, **fkwargs):
1167 1167 request = self._get_request()
1168 1168
1169 1169 if request.method in self.except_methods:
1170 1170 return func(*fargs, **fkwargs)
1171 1171
1172 1172 cur_token = get_csrf_token(save_if_missing=False)
1173 1173 if self.check_csrf(request, cur_token):
1174 1174 if request.POST.get(self.token):
1175 1175 del request.POST[self.token]
1176 1176 return func(*fargs, **fkwargs)
1177 1177 else:
1178 1178 reason = 'token-missing'
1179 1179 supplied_token = self._get_csrf(request)
1180 1180 if supplied_token and cur_token != supplied_token:
1181 1181 reason = 'token-mismatch [%s:%s]' % (
1182 1182                     (cur_token or '')[:6], (supplied_token or '')[:6])
1183 1183
1184 1184 csrf_message = \
1185 1185 ("Cross-site request forgery detected, request denied. See "
1186 1186 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
1187 1187 "more information.")
1188 1188 log.warn('Cross-site request forgery detected, request %r DENIED: %s '
1189 1189 'REMOTE_ADDR:%s, HEADERS:%s' % (
1190 1190 request, reason, request.remote_addr, request.headers))
1191 1191
1192 1192 raise HTTPForbidden(explanation=csrf_message)
1193 1193
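# Decorator usage sketch on a pylons-style controller; the controller and
# action names are made up. GET is exempted so the form itself can still be
# rendered without a token, while the POST is verified:
#
#     class MySettingsController(BaseController):
#
#         @CSRFRequired(except_methods=['GET'])
#         def update(self):
#             ...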
1194 1194
1195 1195 class LoginRequired(object):
1196 1196 """
1197 1197     Must be logged in to execute this function, else
1198 1198     redirect to the login page
1199 1199
1200 1200     :param auth_token_access: if enabled, this checks only for a valid auth
1201 1201         token and grants access based on that token
1202 1202 """
1203 1203 def __init__(self, auth_token_access=None):
1204 1204 self.auth_token_access = auth_token_access
1205 1205
1206 1206 def __call__(self, func):
1207 1207 return get_cython_compat_decorator(self.__wrapper, func)
1208 1208
1209 1209 def _get_request(self):
1210 1210 return get_request(self)
1211 1211
1212 1212 def __wrapper(self, func, *fargs, **fkwargs):
1213 1213 from rhodecode.lib import helpers as h
1214 1214 cls = fargs[0]
1215 1215 user = cls._rhodecode_user
1216 1216 request = self._get_request()
1217 1217
1218 1218 loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
1219 1219 log.debug('Starting login restriction checks for user: %s' % (user,))
1220 1220 # check if our IP is allowed
1221 1221 ip_access_valid = True
1222 1222 if not user.ip_allowed:
1223 1223 h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))),
1224 1224 category='warning')
1225 1225 ip_access_valid = False
1226 1226
1227 1227 # check if we used an APIKEY and it's a valid one
1228 1228 # defined white-list of controllers which API access will be enabled
1229 1229 _auth_token = request.GET.get(
1230 1230 'auth_token', '') or request.GET.get('api_key', '')
1231 1231 auth_token_access_valid = allowed_auth_token_access(
1232 1232 loc, auth_token=_auth_token)
1233 1233
1234 1234 # explicit controller is enabled or API is in our whitelist
1235 1235 if self.auth_token_access or auth_token_access_valid:
1236 1236 log.debug('Checking AUTH TOKEN access for %s' % (cls,))
1237 1237 db_user = user.get_instance()
1238 1238
1239 1239 if db_user:
1240 1240 if self.auth_token_access:
1241 1241 roles = self.auth_token_access
1242 1242 else:
1243 1243 roles = [UserApiKeys.ROLE_HTTP]
1244 1244 token_match = db_user.authenticate_by_token(
1245 1245 _auth_token, roles=roles)
1246 1246 else:
1247 1247 log.debug('Unable to fetch db instance for auth user: %s', user)
1248 1248 token_match = False
1249 1249
1250 1250 if _auth_token and token_match:
1251 1251 auth_token_access_valid = True
1252 1252 log.debug('AUTH TOKEN ****%s is VALID' % (_auth_token[-4:],))
1253 1253 else:
1254 1254 auth_token_access_valid = False
1255 1255 if not _auth_token:
1256 1256 log.debug("AUTH TOKEN *NOT* present in request")
1257 1257 else:
1258 1258 log.warning(
1259 1259 "AUTH TOKEN ****%s *NOT* valid" % _auth_token[-4:])
1260 1260
1261 1261 log.debug('Checking if %s is authenticated @ %s' % (user.username, loc))
1262 1262 reason = 'RHODECODE_AUTH' if user.is_authenticated \
1263 1263 else 'AUTH_TOKEN_AUTH'
1264 1264
1265 1265 if ip_access_valid and (
1266 1266 user.is_authenticated or auth_token_access_valid):
1267 1267 log.info(
1268 1268 'user %s authenticating with:%s IS authenticated on func %s'
1269 1269 % (user, reason, loc))
1270 1270
1271 1271 # update user data to check last activity
1272 1272 user.update_lastactivity()
1273 1273 Session().commit()
1274 1274 return func(*fargs, **fkwargs)
1275 1275 else:
1276 1276 log.warning(
1277 1277 'user %s authenticating with:%s NOT authenticated on '
1278 1278 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s'
1279 1279 % (user, reason, loc, ip_access_valid,
1280 1280 auth_token_access_valid))
1281 1281 # we preserve the get PARAM
1282 1282 came_from = request.path_qs
1283 1283 log.debug('redirecting to login page with %s' % (came_from,))
1284 1284 raise HTTPFound(
1285 1285 h.route_path('login', _query={'came_from': came_from}))
1286 1286
1287 1287
1288 1288 class NotAnonymous(object):
1289 1289 """
1290 1290     Must be logged in to execute this function, else
1291 1291     redirect to the login page
1292 1292 """
1293 1293
1294 1294 def __call__(self, func):
1295 1295 return get_cython_compat_decorator(self.__wrapper, func)
1296 1296
1297 1297 def _get_request(self):
1298 1298 return get_request(self)
1299 1299
1300 1300 def __wrapper(self, func, *fargs, **fkwargs):
1301 1301 import rhodecode.lib.helpers as h
1302 1302 cls = fargs[0]
1303 1303 self.user = cls._rhodecode_user
1304 1304 request = self._get_request()
1305 1305
1306 1306 log.debug('Checking if user is not anonymous @%s' % cls)
1307 1307
1308 1308 anonymous = self.user.username == User.DEFAULT_USER
1309 1309
1310 1310 if anonymous:
1311 1311 came_from = request.path_qs
1312 1312 h.flash(_('You need to be a registered user to '
1313 1313 'perform this action'),
1314 1314 category='warning')
1315 1315 raise HTTPFound(
1316 1316 h.route_path('login', _query={'came_from': came_from}))
1317 1317 else:
1318 1318 return func(*fargs, **fkwargs)
1319 1319
1320 1320
1321 1321 class XHRRequired(object):
1322 1322 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1323 1323
1324 1324 def __call__(self, func):
1325 1325 return get_cython_compat_decorator(self.__wrapper, func)
1326 1326
1327 1327 def _get_request(self):
1328 1328 return get_request(self)
1329 1329
1330 1330 def __wrapper(self, func, *fargs, **fkwargs):
1331 1331 from pylons.controllers.util import abort
1332 1332 request = self._get_request()
1333 1333
1334 1334 log.debug('Checking if request is XMLHttpRequest (XHR)')
1335 1335 xhr_message = 'This is not a valid XMLHttpRequest (XHR) request'
1336 1336
1337 1337 if not request.is_xhr:
1338 1338 abort(400, detail=xhr_message)
1339 1339
1340 1340 return func(*fargs, **fkwargs)
1341 1341
1342 1342
1343 1343 class HasAcceptedRepoType(object):
1344 1344 """
1345 1345 Check if requested repo is within given repo type aliases
1346 1346 """
1347 1347
1348 1348 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1349 1349
1350 1350 def __init__(self, *repo_type_list):
1351 1351 self.repo_type_list = set(repo_type_list)
1352 1352
1353 1353 def __call__(self, func):
1354 1354 return get_cython_compat_decorator(self.__wrapper, func)
1355 1355
1356 1356 def __wrapper(self, func, *fargs, **fkwargs):
1357 1357 import rhodecode.lib.helpers as h
1358 1358 cls = fargs[0]
1359 1359 rhodecode_repo = cls.rhodecode_repo
1360 1360
1361 1361 log.debug('%s checking repo type for %s in %s',
1362 1362 self.__class__.__name__,
1363 1363 rhodecode_repo.alias, self.repo_type_list)
1364 1364
1365 1365 if rhodecode_repo.alias in self.repo_type_list:
1366 1366 return func(*fargs, **fkwargs)
1367 1367 else:
1368 1368 h.flash(h.literal(
1369 1369 _('Action not supported for %s.' % rhodecode_repo.alias)),
1370 1370 category='warning')
1371 1371 raise HTTPFound(
1372 1372 h.route_path('repo_summary',
1373 1373 repo_name=cls.rhodecode_db_repo.repo_name))
1374 1374
1375 1375
1376 1376 class PermsDecorator(object):
1377 1377 """
1378 1378     Base class for controller decorators. We extract the current user
1379 1379     from the controller class itself, which has it stored by the base controllers
1380 1380 """
1381 1381
1382 1382 def __init__(self, *required_perms):
1383 1383 self.required_perms = set(required_perms)
1384 1384
1385 1385 def __call__(self, func):
1386 1386 return get_cython_compat_decorator(self.__wrapper, func)
1387 1387
1388 1388 def _get_request(self):
1389 1389 return get_request(self)
1390 1390
1391 1391 def _get_came_from(self):
1392 1392 _request = self._get_request()
1393 1393
1394 1394 # both pylons/pyramid has this attribute
1395 1395 return _request.path_qs
1396 1396
1397 1397 def __wrapper(self, func, *fargs, **fkwargs):
1398 1398 import rhodecode.lib.helpers as h
1399 1399 cls = fargs[0]
1400 1400 _user = cls._rhodecode_user
1401 1401
1402 1402 log.debug('checking %s permissions %s for %s %s',
1403 1403 self.__class__.__name__, self.required_perms, cls, _user)
1404 1404
1405 1405 if self.check_permissions(_user):
1406 1406 log.debug('Permission granted for %s %s', cls, _user)
1407 1407 return func(*fargs, **fkwargs)
1408 1408
1409 1409 else:
1410 1410 log.debug('Permission denied for %s %s', cls, _user)
1411 1411 anonymous = _user.username == User.DEFAULT_USER
1412 1412
1413 1413 if anonymous:
1414 1414 came_from = self._get_came_from()
1415 1415 h.flash(_('You need to be signed in to view this page'),
1416 1416 category='warning')
1417 1417 raise HTTPFound(
1418 1418 h.route_path('login', _query={'came_from': came_from}))
1419 1419
1420 1420 else:
1421 1421 # redirect with 404 to prevent resource discovery
1422 1422 raise HTTPNotFound()
1423 1423
1424 1424 def check_permissions(self, user):
1425 1425 """Dummy function for overriding"""
1426 1426 raise NotImplementedError(
1427 1427 'You have to write this function in child class')
1428 1428
1429 1429
1430 1430 class HasPermissionAllDecorator(PermsDecorator):
1431 1431 """
1432 1432     Checks for access permission for all given predicates. All of them
1433 1433     have to be met in order to fulfill the request
1434 1434 """
1435 1435
1436 1436 def check_permissions(self, user):
1437 1437 perms = user.permissions_with_scope({})
1438 1438 if self.required_perms.issubset(perms['global']):
1439 1439 return True
1440 1440 return False
1441 1441
1442 1442
1443 1443 class HasPermissionAnyDecorator(PermsDecorator):
1444 1444 """
1445 1445     Checks for access permission for any of the given predicates. In order
1446 1446     to fulfill the request, any of the predicates must be met
1447 1447 """
1448 1448
1449 1449 def check_permissions(self, user):
1450 1450 perms = user.permissions_with_scope({})
1451 1451 if self.required_perms.intersection(perms['global']):
1452 1452 return True
1453 1453 return False
1454 1454
1455 1455
1456 1456 class HasRepoPermissionAllDecorator(PermsDecorator):
1457 1457 """
1458 1458     Checks for access permission for all given predicates for a specific
1459 1459     repository. All of them have to be met in order to fulfill the request
1460 1460 """
1461 1461 def _get_repo_name(self):
1462 1462 _request = self._get_request()
1463 1463 return get_repo_slug(_request)
1464 1464
1465 1465 def check_permissions(self, user):
1466 1466 perms = user.permissions
1467 1467 repo_name = self._get_repo_name()
1468 1468
1469 1469 try:
1470 1470 user_perms = set([perms['repositories'][repo_name]])
1471 1471 except KeyError:
1472 1472 log.debug('cannot locate repo with name: `%s` in permissions defs',
1473 1473 repo_name)
1474 1474 return False
1475 1475
1476 1476 log.debug('checking `%s` permissions for repo `%s`',
1477 1477 user_perms, repo_name)
1478 1478 if self.required_perms.issubset(user_perms):
1479 1479 return True
1480 1480 return False
1481 1481
1482 1482
1483 1483 class HasRepoPermissionAnyDecorator(PermsDecorator):
1484 1484 """
1485 1485     Checks for access permission for any of the given predicates for a
1486 1486     specific repository. Any of the predicates must be met to fulfill the request
1487 1487 """
1488 1488 def _get_repo_name(self):
1489 1489 _request = self._get_request()
1490 1490 return get_repo_slug(_request)
1491 1491
1492 1492 def check_permissions(self, user):
1493 1493 perms = user.permissions
1494 1494 repo_name = self._get_repo_name()
1495 1495
1496 1496 try:
1497 1497 user_perms = set([perms['repositories'][repo_name]])
1498 1498 except KeyError:
1499 1499 log.debug('cannot locate repo with name: `%s` in permissions defs',
1500 1500 repo_name)
1501 1501 return False
1502 1502
1503 1503 log.debug('checking `%s` permissions for repo `%s`',
1504 1504 user_perms, repo_name)
1505 1505 if self.required_perms.intersection(user_perms):
1506 1506 return True
1507 1507 return False
1508 1508
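# Typical stacking of the decorators above on a controller action; the
# controller name is made up. LoginRequired runs first, then the repository
# permission is checked against the repo slug resolved from the request:
#
#     class MyRepoController(BaseRepoController):
#
#         @LoginRequired()
#         @HasRepoPermissionAnyDecorator(
#             'repository.read', 'repository.write', 'repository.admin')
#         def show(self, repo_name):
#             ...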
1509 1509
1510 1510 class HasRepoGroupPermissionAllDecorator(PermsDecorator):
1511 1511 """
1512 1512     Checks for access permission for all given predicates for a specific
1513 1513     repository group. All of them have to be met in order to
1514 1514     fulfill the request
1515 1515 """
1516 1516 def _get_repo_group_name(self):
1517 1517 _request = self._get_request()
1518 1518 return get_repo_group_slug(_request)
1519 1519
1520 1520 def check_permissions(self, user):
1521 1521 perms = user.permissions
1522 1522 group_name = self._get_repo_group_name()
1523 1523 try:
1524 1524 user_perms = set([perms['repositories_groups'][group_name]])
1525 1525 except KeyError:
1526 1526 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1527 1527 group_name)
1528 1528 return False
1529 1529
1530 1530 log.debug('checking `%s` permissions for repo group `%s`',
1531 1531 user_perms, group_name)
1532 1532 if self.required_perms.issubset(user_perms):
1533 1533 return True
1534 1534 return False
1535 1535
1536 1536
1537 1537 class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
1538 1538 """
1539 1539     Checks for access permission for any of the given predicates for a
1540 1540     specific repository group. In order to fulfill the request, any
1541 1541     of the predicates must be met
1542 1542 """
1543 1543 def _get_repo_group_name(self):
1544 1544 _request = self._get_request()
1545 1545 return get_repo_group_slug(_request)
1546 1546
1547 1547 def check_permissions(self, user):
1548 1548 perms = user.permissions
1549 1549 group_name = self._get_repo_group_name()
1550 1550
1551 1551 try:
1552 1552 user_perms = set([perms['repositories_groups'][group_name]])
1553 1553 except KeyError:
1554 1554 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1555 1555 group_name)
1556 1556 return False
1557 1557
1558 1558 log.debug('checking `%s` permissions for repo group `%s`',
1559 1559 user_perms, group_name)
1560 1560 if self.required_perms.intersection(user_perms):
1561 1561 return True
1562 1562 return False
1563 1563
1564 1564
1565 1565 class HasUserGroupPermissionAllDecorator(PermsDecorator):
1566 1566 """
1567 1567     Checks for access permission for all given predicates for a specific
1568 1568     user group. All of them have to be met in order to fulfill the request
1569 1569 """
1570 1570 def _get_user_group_name(self):
1571 1571 _request = self._get_request()
1572 1572 return get_user_group_slug(_request)
1573 1573
1574 1574 def check_permissions(self, user):
1575 1575 perms = user.permissions
1576 1576 group_name = self._get_user_group_name()
1577 1577 try:
1578 1578 user_perms = set([perms['user_groups'][group_name]])
1579 1579 except KeyError:
1580 1580 return False
1581 1581
1582 1582 if self.required_perms.issubset(user_perms):
1583 1583 return True
1584 1584 return False
1585 1585
1586 1586
1587 1587 class HasUserGroupPermissionAnyDecorator(PermsDecorator):
1588 1588 """
1589 1589     Checks for access permission for any of the given predicates for a
1590 1590     specific user group. Any of the predicates must be met to fulfill the request
1591 1591 """
1592 1592 def _get_user_group_name(self):
1593 1593 _request = self._get_request()
1594 1594 return get_user_group_slug(_request)
1595 1595
1596 1596 def check_permissions(self, user):
1597 1597 perms = user.permissions
1598 1598 group_name = self._get_user_group_name()
1599 1599 try:
1600 1600 user_perms = set([perms['user_groups'][group_name]])
1601 1601 except KeyError:
1602 1602 return False
1603 1603
1604 1604 if self.required_perms.intersection(user_perms):
1605 1605 return True
1606 1606 return False
1607 1607
1608 1608
1609 1609 # CHECK FUNCTIONS
1610 1610 class PermsFunction(object):
1611 1611 """Base function for other check functions"""
1612 1612
1613 1613 def __init__(self, *perms):
1614 1614 self.required_perms = set(perms)
1615 1615 self.repo_name = None
1616 1616 self.repo_group_name = None
1617 1617 self.user_group_name = None
1618 1618
1619 1619 def __bool__(self):
1620 1620 frame = inspect.currentframe()
1621 1621 stack_trace = traceback.format_stack(frame)
1622 1622 log.error('Checking bool value on a class instance of perm '
1623 1623 'function is not allowed: %s' % ''.join(stack_trace))
1624 1624 # rather than throwing errors, we always return False here, so if
1625 1625 # someone accidentally checks the truth value of a bare instance it
1626 1626 # always ends up returning False
1627 1627 return False
1628 1628 __nonzero__ = __bool__
1629 1629
1630 1630 def __call__(self, check_location='', user=None):
1631 1631 if not user:
1632 1632 log.debug('Using user attribute from global request')
1633 1633 # TODO: remove this someday; pass the user in as an attribute here
1634 1634 request = self._get_request()
1635 1635 user = request.user
1636 1636
1637 1637 # init auth user if not already given
1638 1638 if not isinstance(user, AuthUser):
1639 1639 log.debug('Wrapping user %s into AuthUser', user)
1640 1640 user = AuthUser(user.user_id)
1641 1641
1642 1642 cls_name = self.__class__.__name__
1643 1643 check_scope = self._get_check_scope(cls_name)
1644 1644 check_location = check_location or 'unspecified location'
1645 1645
1646 1646 log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
1647 1647 self.required_perms, user, check_scope, check_location)
1648 1648 if not user:
1649 1649 log.warning('Empty user given for permission check')
1650 1650 return False
1651 1651
1652 1652 if self.check_permissions(user):
1653 1653 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1654 1654 check_scope, user, check_location)
1655 1655 return True
1656 1656
1657 1657 else:
1658 1658 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1659 1659 check_scope, user, check_location)
1660 1660 return False
1661 1661
1662 1662 def _get_request(self):
1663 1663 return get_request(self)
1664 1664
1665 1665 def _get_check_scope(self, cls_name):
1666 1666 return {
1667 1667 'HasPermissionAll': 'GLOBAL',
1668 1668 'HasPermissionAny': 'GLOBAL',
1669 1669 'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
1670 1670 'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
1671 1671 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
1672 1672 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
1673 1673 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
1674 1674 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
1675 1675 }.get(cls_name, '?:%s' % cls_name)
1676 1676
1677 1677 def check_permissions(self, user):
1678 1678 """Dummy function for overriding"""
1679 1679 raise Exception('You have to implement this function in a child class')
1680 1680
1681 1681
1682 1682 class HasPermissionAll(PermsFunction):
1683 1683 def check_permissions(self, user):
1684 1684 perms = user.permissions_with_scope({})
1685 1685 if self.required_perms.issubset(perms.get('global')):
1686 1686 return True
1687 1687 return False
1688 1688
1689 1689
1690 1690 class HasPermissionAny(PermsFunction):
1691 1691 def check_permissions(self, user):
1692 1692 perms = user.permissions_with_scope({})
1693 1693 if self.required_perms.intersection(perms.get('global')):
1694 1694 return True
1695 1695 return False
1696 1696
1697 1697
1698 1698 class HasRepoPermissionAll(PermsFunction):
1699 1699 def __call__(self, repo_name=None, check_location='', user=None):
1700 1700 self.repo_name = repo_name
1701 1701 return super(HasRepoPermissionAll, self).__call__(check_location, user)
1702 1702
1703 1703 def _get_repo_name(self):
1704 1704 if not self.repo_name:
1705 1705 _request = self._get_request()
1706 1706 self.repo_name = get_repo_slug(_request)
1707 1707 return self.repo_name
1708 1708
1709 1709 def check_permissions(self, user):
1710 1710 self.repo_name = self._get_repo_name()
1711 1711 perms = user.permissions
1712 1712 try:
1713 1713 user_perms = set([perms['repositories'][self.repo_name]])
1714 1714 except KeyError:
1715 1715 return False
1716 1716 if self.required_perms.issubset(user_perms):
1717 1717 return True
1718 1718 return False
1719 1719
1720 1720
1721 1721 class HasRepoPermissionAny(PermsFunction):
1722 1722 def __call__(self, repo_name=None, check_location='', user=None):
1723 1723 self.repo_name = repo_name
1724 1724 return super(HasRepoPermissionAny, self).__call__(check_location, user)
1725 1725
1726 1726 def _get_repo_name(self):
1727 1727 if not self.repo_name:
1728 1728 _request = self._get_request()
1729 1729 self.repo_name = get_repo_slug(_request)
1730 1730 return self.repo_name
1731 1731
1732 1732 def check_permissions(self, user):
1733 1733 self.repo_name = self._get_repo_name()
1734 1734 perms = user.permissions
1735 1735 try:
1736 1736 user_perms = set([perms['repositories'][self.repo_name]])
1737 1737 except KeyError:
1738 1738 return False
1739 1739 if self.required_perms.intersection(user_perms):
1740 1740 return True
1741 1741 return False
1742 1742
1743 1743
1744 1744 class HasRepoGroupPermissionAny(PermsFunction):
1745 1745 def __call__(self, group_name=None, check_location='', user=None):
1746 1746 self.repo_group_name = group_name
1747 1747 return super(HasRepoGroupPermissionAny, self).__call__(
1748 1748 check_location, user)
1749 1749
1750 1750 def check_permissions(self, user):
1751 1751 perms = user.permissions
1752 1752 try:
1753 1753 user_perms = set(
1754 1754 [perms['repositories_groups'][self.repo_group_name]])
1755 1755 except KeyError:
1756 1756 return False
1757 1757 if self.required_perms.intersection(user_perms):
1758 1758 return True
1759 1759 return False
1760 1760
1761 1761
1762 1762 class HasRepoGroupPermissionAll(PermsFunction):
1763 1763 def __call__(self, group_name=None, check_location='', user=None):
1764 1764 self.repo_group_name = group_name
1765 1765 return super(HasRepoGroupPermissionAll, self).__call__(
1766 1766 check_location, user)
1767 1767
1768 1768 def check_permissions(self, user):
1769 1769 perms = user.permissions
1770 1770 try:
1771 1771 user_perms = set(
1772 1772 [perms['repositories_groups'][self.repo_group_name]])
1773 1773 except KeyError:
1774 1774 return False
1775 1775 if self.required_perms.issubset(user_perms):
1776 1776 return True
1777 1777 return False
1778 1778
1779 1779
1780 1780 class HasUserGroupPermissionAny(PermsFunction):
1781 1781 def __call__(self, user_group_name=None, check_location='', user=None):
1782 1782 self.user_group_name = user_group_name
1783 1783 return super(HasUserGroupPermissionAny, self).__call__(
1784 1784 check_location, user)
1785 1785
1786 1786 def check_permissions(self, user):
1787 1787 perms = user.permissions
1788 1788 try:
1789 1789 user_perms = set([perms['user_groups'][self.user_group_name]])
1790 1790 except KeyError:
1791 1791 return False
1792 1792 if self.required_perms.intersection(user_perms):
1793 1793 return True
1794 1794 return False
1795 1795
1796 1796
1797 1797 class HasUserGroupPermissionAll(PermsFunction):
1798 1798 def __call__(self, user_group_name=None, check_location='', user=None):
1799 1799 self.user_group_name = user_group_name
1800 1800 return super(HasUserGroupPermissionAll, self).__call__(
1801 1801 check_location, user)
1802 1802
1803 1803 def check_permissions(self, user):
1804 1804 perms = user.permissions
1805 1805 try:
1806 1806 user_perms = set([perms['user_groups'][self.user_group_name]])
1807 1807 except KeyError:
1808 1808 return False
1809 1809 if self.required_perms.issubset(user_perms):
1810 1810 return True
1811 1811 return False
1812 1812
1813 1813
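The "Any" and "All" check functions above differ only in the set operation applied to the single permission string resolved for the target object: "Any" grants access when required_perms intersects the granted set, "All" only when it is a subset of it. A minimal standalone sketch of that logic; the permission strings below are made-up examples, not taken from this changeset:

    # 'Any' vs 'All' semantics used by the permission check classes above
    required = set(['repository.read', 'repository.write'])
    granted = set(['repository.read'])  # what the permission tree resolved to

    any_ok = bool(required.intersection(granted))  # True  -> 'Any' grants access
    all_ok = required.issubset(granted)            # False -> 'All' denies access
    print('any=%s all=%s' % (any_ok, all_ok))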
1814 1814 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
1815 1815 class HasPermissionAnyMiddleware(object):
1816 1816 def __init__(self, *perms):
1817 1817 self.required_perms = set(perms)
1818 1818
1819 1819 def __call__(self, user, repo_name):
1820 1820 # repo_name MUST be unicode, since the keys in the permission
1821 1821 # dict are unicode
1822 1822 repo_name = safe_unicode(repo_name)
1823 1823 user = AuthUser(user.user_id)
1824 1824 log.debug(
1825 1825 'Checking VCS protocol permissions %s for user:%s repo:`%s`',
1826 1826 self.required_perms, user, repo_name)
1827 1827
1828 1828 if self.check_permissions(user, repo_name):
1829 1829 log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
1830 1830 repo_name, user, 'PermissionMiddleware')
1831 1831 return True
1832 1832
1833 1833 else:
1834 1834 log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
1835 1835 repo_name, user, 'PermissionMiddleware')
1836 1836 return False
1837 1837
1838 1838 def check_permissions(self, user, repo_name):
1839 1839 perms = user.permissions_with_scope({'repo_name': repo_name})
1840 1840
1841 1841 try:
1842 1842 user_perms = set([perms['repositories'][repo_name]])
1843 1843 except Exception:
1844 1844 log.exception('Error while accessing user permissions')
1845 1845 return False
1846 1846
1847 1847 if self.required_perms.intersection(user_perms):
1848 1848 return True
1849 1849 return False
1850 1850
1851 1851
1852 1852 # SPECIAL VERSION TO HANDLE API AUTH
1853 1853 class _BaseApiPerm(object):
1854 1854 def __init__(self, *perms):
1855 1855 self.required_perms = set(perms)
1856 1856
1857 1857 def __call__(self, check_location=None, user=None, repo_name=None,
1858 1858 group_name=None, user_group_name=None):
1859 1859 cls_name = self.__class__.__name__
1860 1860 check_scope = 'global:%s' % (self.required_perms,)
1861 1861 if repo_name:
1862 1862 check_scope += ', repo_name:%s' % (repo_name,)
1863 1863
1864 1864 if group_name:
1865 1865 check_scope += ', repo_group_name:%s' % (group_name,)
1866 1866
1867 1867 if user_group_name:
1868 1868 check_scope += ', user_group_name:%s' % (user_group_name,)
1869 1869
1870 1870 log.debug(
1871 1871 'checking cls:%s %s %s @ %s'
1872 1872 % (cls_name, self.required_perms, check_scope, check_location))
1873 1873 if not user:
1874 1874 log.debug('Empty User passed into arguments')
1875 1875 return False
1876 1876
1877 1877 # process user
1878 1878 if not isinstance(user, AuthUser):
1879 1879 user = AuthUser(user.user_id)
1880 1880 if not check_location:
1881 1881 check_location = 'unspecified'
1882 1882 if self.check_permissions(user.permissions, repo_name, group_name,
1883 1883 user_group_name):
1884 1884 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1885 1885 check_scope, user, check_location)
1886 1886 return True
1887 1887
1888 1888 else:
1889 1889 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1890 1890 check_scope, user, check_location)
1891 1891 return False
1892 1892
1893 1893 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1894 1894 user_group_name=None):
1895 1895 """
1896 1896 Implement in a child class; should return True if permissions are ok,
1897 1897 False otherwise.
1898 1898
1899 1899 :param perm_defs: dict with permission definitions
1900 1900 :param repo_name: repo name
1901 1901 """
1902 1902 raise NotImplementedError()
1903 1903
1904 1904
1905 1905 class HasPermissionAllApi(_BaseApiPerm):
1906 1906 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1907 1907 user_group_name=None):
1908 1908 if self.required_perms.issubset(perm_defs.get('global')):
1909 1909 return True
1910 1910 return False
1911 1911
1912 1912
1913 1913 class HasPermissionAnyApi(_BaseApiPerm):
1914 1914 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1915 1915 user_group_name=None):
1916 1916 if self.required_perms.intersection(perm_defs.get('global')):
1917 1917 return True
1918 1918 return False
1919 1919
1920 1920
1921 1921 class HasRepoPermissionAllApi(_BaseApiPerm):
1922 1922 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1923 1923 user_group_name=None):
1924 1924 try:
1925 1925 _user_perms = set([perm_defs['repositories'][repo_name]])
1926 1926 except KeyError:
1927 1927 log.warning(traceback.format_exc())
1928 1928 return False
1929 1929 if self.required_perms.issubset(_user_perms):
1930 1930 return True
1931 1931 return False
1932 1932
1933 1933
1934 1934 class HasRepoPermissionAnyApi(_BaseApiPerm):
1935 1935 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1936 1936 user_group_name=None):
1937 1937 try:
1938 1938 _user_perms = set([perm_defs['repositories'][repo_name]])
1939 1939 except KeyError:
1940 1940 log.warning(traceback.format_exc())
1941 1941 return False
1942 1942 if self.required_perms.intersection(_user_perms):
1943 1943 return True
1944 1944 return False
1945 1945
1946 1946
1947 1947 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
1948 1948 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1949 1949 user_group_name=None):
1950 1950 try:
1951 1951 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1952 1952 except KeyError:
1953 1953 log.warning(traceback.format_exc())
1954 1954 return False
1955 1955 if self.required_perms.intersection(_user_perms):
1956 1956 return True
1957 1957 return False
1958 1958
1959 1959
1960 1960 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
1961 1961 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1962 1962 user_group_name=None):
1963 1963 try:
1964 1964 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1965 1965 except KeyError:
1966 1966 log.warning(traceback.format_exc())
1967 1967 return False
1968 1968 if self.required_perms.issubset(_user_perms):
1969 1969 return True
1970 1970 return False
1971 1971
1972 1972
1973 1973 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
1974 1974 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1975 1975 user_group_name=None):
1976 1976 try:
1977 1977 _user_perms = set([perm_defs['user_groups'][user_group_name]])
1978 1978 except KeyError:
1979 1979 log.warning(traceback.format_exc())
1980 1980 return False
1981 1981 if self.required_perms.intersection(_user_perms):
1982 1982 return True
1983 1983 return False
1984 1984
1985 1985
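The API permission checkers above work on a plain perm_defs dict instead of an AuthUser instance. A rough sketch of the shape they expect; the keys and permission values below are hypothetical examples:

    perm_defs = {
        'global': set(['hg.admin']),
        'repositories': {'my-repo': 'repository.write'},
        'repositories_groups': {'my-group': 'group.read'},
        'user_groups': {'devs': 'usergroup.read'},
    }

    # e.g. a repository check with 'Any' semantics, as in HasRepoPermissionAnyApi
    required = set(['repository.write', 'repository.admin'])
    granted = set([perm_defs['repositories']['my-repo']])
    print(bool(required.intersection(granted)))  # True
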
1986 1986 def check_ip_access(source_ip, allowed_ips=None):
1987 1987 """
1988 1988 Checks if source_ip is a subnet of any of allowed_ips.
1989 1989
1990 1990 :param source_ip:
1991 1991 :param allowed_ips: list of allowed ips together with mask
1992 1992 """
1993 1993 log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips))
1994 source_ip_address = ipaddress.ip_address(source_ip)
1994 source_ip_address = ipaddress.ip_address(safe_unicode(source_ip))
1995 1995 if isinstance(allowed_ips, (tuple, list, set)):
1996 1996 for ip in allowed_ips:
1997 ip = safe_unicode(ip)
1997 1998 try:
1998 1999 network_address = ipaddress.ip_network(ip, strict=False)
1999 2000 if source_ip_address in network_address:
2000 2001 log.debug('IP %s is network %s' %
2001 2002 (source_ip_address, network_address))
2002 2003 return True
2003 2004 # if we cannot parse an entry, don't crash; just skip it and
2004 2005 # log the error. We still want to answer with forbidden when a
2005 2006 # bad IP is sent
2006 2007 except Exception:
2007 2008 log.error(traceback.format_exc())
2008 2009 continue
2009 2010 return False
2010 2011
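check_ip_access() ultimately relies on the ipaddress module: the source address must fall inside one of the allowed networks. On Python 2 the ipaddress backport only accepts text (unicode) input, which is why both the source IP and each allowed entry are wrapped in safe_unicode() above. A minimal standalone sketch of the underlying membership test; the addresses are made-up examples:

    import ipaddress

    source = ipaddress.ip_address(u'192.168.1.17')
    allowed = ipaddress.ip_network(u'192.168.1.0/24', strict=False)
    # strict=False accepts networks with host bits set, mirroring the call above
    print(source in allowed)  # True: the address is inside the allowed network
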
2011 2012
2012 2013 def get_cython_compat_decorator(wrapper, func):
2013 2014 """
2014 2015 Creates a cython compatible decorator. The previously used
2015 2016 decorator.decorator() function seems to be incompatible with cython.
2016 2017
2017 2018 :param wrapper: __wrapper method of the decorator class
2018 2019 :param func: decorated function
2019 2020 """
2020 2021 @wraps(func)
2021 2022 def local_wrapper(*args, **kwds):
2022 2023 return wrapper(func, *args, **kwds)
2023 2024 local_wrapper.__wrapped__ = func
2024 2025 return local_wrapper
2025 2026
2026 2027
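A short usage sketch for get_cython_compat_decorator(); the wrapper and decorated function below are hypothetical examples, and the helper itself is repeated so the snippet runs standalone:

    from functools import wraps

    def get_cython_compat_decorator(wrapper, func):  # same helper as above
        @wraps(func)
        def local_wrapper(*args, **kwds):
            return wrapper(func, *args, **kwds)
        local_wrapper.__wrapped__ = func
        return local_wrapper

    def logging_wrapper(func, *args, **kwargs):
        # a trivial __wrapper-style callable: log, then delegate
        print('calling %s' % func.__name__)
        return func(*args, **kwargs)

    def greet(name):
        return 'hello %s' % name

    greet = get_cython_compat_decorator(logging_wrapper, greet)
    print(greet('world'))  # prints 'calling greet', then 'hello world'
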
@@ -1,631 +1,632 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 The base Controller API
23 23 Provides the BaseController class for subclassing. And usage in different
24 24 controllers
25 25 """
26 26
27 27 import logging
28 28 import socket
29 29
30 30 import ipaddress
31 31 import pyramid.threadlocal
32 32
33 33 from paste.auth.basic import AuthBasicAuthenticator
34 34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
35 35 from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
36 36 from pylons import config, tmpl_context as c, request, url
37 37 from pylons.controllers import WSGIController
38 38 from pylons.controllers.util import redirect
39 39 from pylons.i18n import translation
40 40 # marcink: don't remove this import
41 41 from pylons.templating import render_mako as render # noqa
42 42 from pylons.i18n.translation import _
43 43 from webob.exc import HTTPFound
44 44
45 45
46 46 import rhodecode
47 47 from rhodecode.authentication.base import VCS_TYPE
48 48 from rhodecode.lib import auth, utils2
49 49 from rhodecode.lib import helpers as h
50 50 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
51 51 from rhodecode.lib.exceptions import UserCreationError
52 52 from rhodecode.lib.utils import (
53 53 get_repo_slug, set_rhodecode_config, password_changed,
54 54 get_enabled_hook_classes)
55 55 from rhodecode.lib.utils2 import (
56 56 str2bool, safe_unicode, AttributeDict, safe_int, md5, aslist)
57 57 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
58 58 from rhodecode.model import meta
59 59 from rhodecode.model.db import Repository, User, ChangesetComment
60 60 from rhodecode.model.notification import NotificationModel
61 61 from rhodecode.model.scm import ScmModel
62 62 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
63 63
64 64
65 65 log = logging.getLogger(__name__)
66 66
67 67
68 68 def _filter_proxy(ip):
69 69 """
70 70 IP addresses passed in via HEADERS can be in a special format of multiple
71 71 IPs. Those comma separated IPs are appended by the various proxies in the
72 72 chain of request processing, the left-most being the original client.
73 73 We only care about that first IP, which came from the original client.
74 74
75 75 :param ip: ip string from headers
76 76 """
77 77 if ',' in ip:
78 78 _ips = ip.split(',')
79 79 _first_ip = _ips[0].strip()
80 80 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
81 81 return _first_ip
82 82 return ip
83 83
84 84
85 85 def _filter_port(ip):
86 86 """
87 87 Removes a port from an ip; there are 4 main cases to handle here:
88 88 - ipv4 eg. 127.0.0.1
89 89 - ipv6 eg. ::1
90 90 - ipv4+port eg. 127.0.0.1:8080
91 91 - ipv6+port eg. [::1]:8080
92 92
93 93 :param ip:
94 94 """
95 95 def is_ipv6(ip_addr):
96 96 if hasattr(socket, 'inet_pton'):
97 97 try:
98 98 socket.inet_pton(socket.AF_INET6, ip_addr)
99 99 except socket.error:
100 100 return False
101 101 else:
102 102 # fallback to ipaddress
103 103 try:
104 ipaddress.IPv6Address(ip_addr)
104 ipaddress.IPv6Address(safe_unicode(ip_addr))
105 105 except Exception:
106 106 return False
107 107 return True
108 108
109 109 if ':' not in ip: # must be ipv4 pure ip
110 110 return ip
111 111
112 112 if '[' in ip and ']' in ip: # ipv6 with port
113 113 return ip.split(']')[0][1:].lower()
114 114
115 115 # must be ipv6 or ipv4 with port
116 116 if is_ipv6(ip):
117 117 return ip
118 118 else:
119 119 ip, _port = ip.split(':')[:2] # means ipv4+port
120 120 return ip
121 121
122 122
123 123 def get_ip_addr(environ):
124 124 proxy_key = 'HTTP_X_REAL_IP'
125 125 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
126 126 def_key = 'REMOTE_ADDR'
127 127 _filters = lambda x: _filter_port(_filter_proxy(x))
128 128
129 129 ip = environ.get(proxy_key)
130 130 if ip:
131 131 return _filters(ip)
132 132
133 133 ip = environ.get(proxy_key2)
134 134 if ip:
135 135 return _filters(ip)
136 136
137 137 ip = environ.get(def_key, '0.0.0.0')
138 138 return _filters(ip)
139 139
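Taken together, _filter_proxy(), _filter_port() and get_ip_addr() reduce the raw WSGI environ to a single client address: the proxy headers win over REMOTE_ADDR, only the left-most entry of a proxy chain is kept, and any port suffix is stripped. A rough usage sketch, assuming the helpers above are in scope; the addresses are made-up:

    environ = {
        'REMOTE_ADDR': '10.0.0.5',
        'HTTP_X_FORWARDED_FOR': '[2001:db8::1]:443, 10.0.0.1',
    }
    # HTTP_X_REAL_IP would win if present; here X-Forwarded-For is used,
    # its left-most (original client) entry is picked and the port stripped
    print(get_ip_addr(environ))  # '2001:db8::1'
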
140 140
141 141 def get_server_ip_addr(environ, log_errors=True):
142 142 hostname = environ.get('SERVER_NAME')
143 143 try:
144 144 return socket.gethostbyname(hostname)
145 145 except Exception as e:
146 146 if log_errors:
147 147 # in some cases this lookup is not possible, and we don't want to
148 148 # make it an exception in logs
149 149 log.exception('Could not retrieve server ip address: %s', e)
150 150 return hostname
151 151
152 152
153 153 def get_server_port(environ):
154 154 return environ.get('SERVER_PORT')
155 155
156 156
157 157 def get_access_path(environ):
158 158 path = environ.get('PATH_INFO')
159 159 org_req = environ.get('pylons.original_request')
160 160 if org_req:
161 161 path = org_req.environ.get('PATH_INFO')
162 162 return path
163 163
164 164
165 165 def get_user_agent(environ):
166 166 return environ.get('HTTP_USER_AGENT')
167 167
168 168
169 169 def vcs_operation_context(
170 170 environ, repo_name, username, action, scm, check_locking=True,
171 171 is_shadow_repo=False):
172 172 """
173 173 Generate the context for a vcs operation, e.g. push or pull.
174 174
175 175 This context is passed over the layers so that hooks triggered by the
176 176 vcs operation know details like the user, the user's IP address etc.
177 177
178 178 :param check_locking: Allows switching off the computation of the locking
179 179 data. This mainly serves the need of the simplevcs middleware to be
180 180 able to disable this for certain operations.
181 181
182 182 """
183 183 # Tri-state value: False: unlock, None: nothing, True: lock
184 184 make_lock = None
185 185 locked_by = [None, None, None]
186 186 is_anonymous = username == User.DEFAULT_USER
187 187 if not is_anonymous and check_locking:
188 188 log.debug('Checking locking on repository "%s"', repo_name)
189 189 user = User.get_by_username(username)
190 190 repo = Repository.get_by_repo_name(repo_name)
191 191 make_lock, __, locked_by = repo.get_locking_state(
192 192 action, user.user_id)
193 193
194 194 settings_model = VcsSettingsModel(repo=repo_name)
195 195 ui_settings = settings_model.get_ui_settings()
196 196
197 197 extras = {
198 198 'ip': get_ip_addr(environ),
199 199 'username': username,
200 200 'action': action,
201 201 'repository': repo_name,
202 202 'scm': scm,
203 203 'config': rhodecode.CONFIG['__file__'],
204 204 'make_lock': make_lock,
205 205 'locked_by': locked_by,
206 206 'server_url': utils2.get_server_url(environ),
207 207 'user_agent': get_user_agent(environ),
208 208 'hooks': get_enabled_hook_classes(ui_settings),
209 209 'is_shadow_repo': is_shadow_repo,
210 210 }
211 211 return extras
212 212
213 213
214 214 class BasicAuth(AuthBasicAuthenticator):
215 215
216 216 def __init__(self, realm, authfunc, registry, auth_http_code=None,
217 217 initial_call_detection=False, acl_repo_name=None):
218 218 self.realm = realm
219 219 self.initial_call = initial_call_detection
220 220 self.authfunc = authfunc
221 221 self.registry = registry
222 222 self.acl_repo_name = acl_repo_name
223 223 self._rc_auth_http_code = auth_http_code
224 224
225 225 def _get_response_from_code(self, http_code):
226 226 try:
227 227 return get_exception(safe_int(http_code))
228 228 except Exception:
229 229 log.exception('Failed to fetch response for code %s' % http_code)
230 230 return HTTPForbidden
231 231
232 232 def build_authentication(self):
233 233 head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
234 234 if self._rc_auth_http_code and not self.initial_call:
235 235 # return alternative HTTP code if alternative http return code
236 236 # is specified in RhodeCode config, but ONLY if it's not the
237 237 # FIRST call
238 238 custom_response_klass = self._get_response_from_code(
239 239 self._rc_auth_http_code)
240 240 return custom_response_klass(headers=head)
241 241 return HTTPUnauthorized(headers=head)
242 242
243 243 def authenticate(self, environ):
244 244 authorization = AUTHORIZATION(environ)
245 245 if not authorization:
246 246 return self.build_authentication()
247 247 (authmeth, auth) = authorization.split(' ', 1)
248 248 if 'basic' != authmeth.lower():
249 249 return self.build_authentication()
250 250 auth = auth.strip().decode('base64')
251 251 _parts = auth.split(':', 1)
252 252 if len(_parts) == 2:
253 253 username, password = _parts
254 254 if self.authfunc(
255 255 username, password, environ, VCS_TYPE,
256 256 registry=self.registry, acl_repo_name=self.acl_repo_name):
257 257 return username
258 258 if username and password:
259 259 # we mark that we actually executed authentication once, at
260 260 # that point we can use the alternative auth code
261 261 self.initial_call = False
262 262
263 263 return self.build_authentication()
264 264
265 265 __call__ = authenticate
266 266
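BasicAuth.authenticate() expects a standard Basic authorization header, i.e. a base64-encoded username:password pair. A small standalone sketch of the decoding step it performs; the credentials are made-up:

    import base64

    header = 'Basic ' + base64.b64encode('alice:s3cret')
    authmeth, auth = header.split(' ', 1)
    # mirrors auth.strip().decode('base64') in the method above
    username, password = base64.b64decode(auth.strip()).split(':', 1)
    print('%s / %s' % (username, password))  # alice / s3cret
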
267 267
268 268 def calculate_version_hash():
269 269 return md5(
270 270 config.get('beaker.session.secret', '') +
271 271 rhodecode.__version__)[:8]
272 272
273 273
274 274 def get_current_lang(request):
275 275 # NOTE(marcink): remove after pyramid move
276 276 try:
277 277 return translation.get_lang()[0]
278 278 except:
279 279 pass
280 280
281 281 return getattr(request, '_LOCALE_', None)
282 282
283 283
284 284 def attach_context_attributes(context, request, user_id):
285 285 """
286 286 Attach variables to the template context called `c`; please note that
287 287 the request could be either a pylons or a pyramid request here.
288 288 """
289
289 290 rc_config = SettingsModel().get_all_settings(cache=True)
290 291
291 292 context.rhodecode_version = rhodecode.__version__
292 293 context.rhodecode_edition = config.get('rhodecode.edition')
293 294 # unique secret + version does not leak the version but keep consistency
294 295 context.rhodecode_version_hash = calculate_version_hash()
295 296
296 297 # Default language set for the incoming request
297 298 context.language = get_current_lang(request)
298 299
299 300 # Visual options
300 301 context.visual = AttributeDict({})
301 302
302 303 # DB stored Visual Items
303 304 context.visual.show_public_icon = str2bool(
304 305 rc_config.get('rhodecode_show_public_icon'))
305 306 context.visual.show_private_icon = str2bool(
306 307 rc_config.get('rhodecode_show_private_icon'))
307 308 context.visual.stylify_metatags = str2bool(
308 309 rc_config.get('rhodecode_stylify_metatags'))
309 310 context.visual.dashboard_items = safe_int(
310 311 rc_config.get('rhodecode_dashboard_items', 100))
311 312 context.visual.admin_grid_items = safe_int(
312 313 rc_config.get('rhodecode_admin_grid_items', 100))
313 314 context.visual.repository_fields = str2bool(
314 315 rc_config.get('rhodecode_repository_fields'))
315 316 context.visual.show_version = str2bool(
316 317 rc_config.get('rhodecode_show_version'))
317 318 context.visual.use_gravatar = str2bool(
318 319 rc_config.get('rhodecode_use_gravatar'))
319 320 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
320 321 context.visual.default_renderer = rc_config.get(
321 322 'rhodecode_markup_renderer', 'rst')
322 323 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
323 324 context.visual.rhodecode_support_url = \
324 325 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
325 326
326 327 context.pre_code = rc_config.get('rhodecode_pre_code')
327 328 context.post_code = rc_config.get('rhodecode_post_code')
328 329 context.rhodecode_name = rc_config.get('rhodecode_title')
329 330 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
330 331 # if default_encoding is specified in the request, it takes
331 332 # priority
332 333 if request.GET.get('default_encoding'):
333 334 context.default_encodings.insert(0, request.GET.get('default_encoding'))
334 335 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
335 336
336 337 # INI stored
337 338 context.labs_active = str2bool(
338 339 config.get('labs_settings_active', 'false'))
339 340 context.visual.allow_repo_location_change = str2bool(
340 341 config.get('allow_repo_location_change', True))
341 342 context.visual.allow_custom_hooks_settings = str2bool(
342 343 config.get('allow_custom_hooks_settings', True))
343 344 context.debug_style = str2bool(config.get('debug_style', False))
344 345
345 346 context.rhodecode_instanceid = config.get('instance_id')
346 347
347 348 context.visual.cut_off_limit_diff = safe_int(
348 349 config.get('cut_off_limit_diff'))
349 350 context.visual.cut_off_limit_file = safe_int(
350 351 config.get('cut_off_limit_file'))
351 352
352 353 # AppEnlight
353 354 context.appenlight_enabled = str2bool(config.get('appenlight', 'false'))
354 355 context.appenlight_api_public_key = config.get(
355 356 'appenlight.api_public_key', '')
356 357 context.appenlight_server_url = config.get('appenlight.server_url', '')
357 358
358 359 # JS template context
359 360 context.template_context = {
360 361 'repo_name': None,
361 362 'repo_type': None,
362 363 'repo_landing_commit': None,
363 364 'rhodecode_user': {
364 365 'username': None,
365 366 'email': None,
366 367 'notification_status': False
367 368 },
368 369 'visual': {
369 370 'default_renderer': None
370 371 },
371 372 'commit_data': {
372 373 'commit_id': None
373 374 },
374 375 'pull_request_data': {'pull_request_id': None},
375 376 'timeago': {
376 377 'refresh_time': 120 * 1000,
377 378 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
378 379 },
379 380 'pylons_dispatch': {
380 381 # 'controller': request.environ['pylons.routes_dict']['controller'],
381 382 # 'action': request.environ['pylons.routes_dict']['action'],
382 383 },
383 384 'pyramid_dispatch': {
384 385
385 386 },
386 387 'extra': {'plugins': {}}
387 388 }
388 389 # END CONFIG VARS
389 390
390 391 # TODO: This doesn't work when called from the pylons compatibility tween.
391 392 # Fix this and remove it from base controller.
392 393 # context.repo_name = get_repo_slug(request) # can be empty
393 394
394 395 diffmode = 'sideside'
395 396 if request.GET.get('diffmode'):
396 397 if request.GET['diffmode'] == 'unified':
397 398 diffmode = 'unified'
398 399 elif request.session.get('diffmode'):
399 400 diffmode = request.session['diffmode']
400 401
401 402 context.diffmode = diffmode
402 403
403 404 if request.session.get('diffmode') != diffmode:
404 405 request.session['diffmode'] = diffmode
405 406
406 407 context.csrf_token = auth.get_csrf_token(session=request.session)
407 408 context.backends = rhodecode.BACKENDS.keys()
408 409 context.backends.sort()
409 410 context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id)
410 411
411 412 # NOTE(marcink): once migrated to pyramid we don't need to set this anymore,
412 413 # since the given request will ALWAYS be a pyramid one
413 414 pyramid_request = pyramid.threadlocal.get_current_request()
414 415 context.pyramid_request = pyramid_request
415 416
416 417 # web case
417 418 if hasattr(pyramid_request, 'user'):
418 419 context.auth_user = pyramid_request.user
419 420 context.rhodecode_user = pyramid_request.user
420 421
421 422 # api case
422 423 if hasattr(pyramid_request, 'rpc_user'):
423 424 context.auth_user = pyramid_request.rpc_user
424 425 context.rhodecode_user = pyramid_request.rpc_user
425 426
426 427 # attach the whole call context to the request
427 428 request.call_context = context
428 429
429 430
430 431 def get_auth_user(request):
431 432 environ = request.environ
432 433 session = request.session
433 434
434 435 ip_addr = get_ip_addr(environ)
435 436 # make sure that we update permissions each time we call controller
436 437 _auth_token = (request.GET.get('auth_token', '') or
437 438 request.GET.get('api_key', ''))
438 439
439 440 if _auth_token:
440 441 # when using API_KEY we assume user exists, and
441 442 # doesn't need auth based on cookies.
442 443 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
443 444 authenticated = False
444 445 else:
445 446 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
446 447 try:
447 448 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
448 449 ip_addr=ip_addr)
449 450 except UserCreationError as e:
450 451 h.flash(e, 'error')
451 452 # container auth or other auth functions that create users
452 453 # on the fly can throw this exception, signaling that there's
453 454 # an issue with user creation; the explanation should be provided
454 455 # in the Exception itself. We then create a simple blank
455 456 # AuthUser
456 457 auth_user = AuthUser(ip_addr=ip_addr)
457 458
458 459 if password_changed(auth_user, session):
459 460 session.invalidate()
460 461 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
461 462 auth_user = AuthUser(ip_addr=ip_addr)
462 463
463 464 authenticated = cookie_store.get('is_authenticated')
464 465
465 466 if not auth_user.is_authenticated and auth_user.is_user_object:
466 467 # user is not authenticated and not empty
467 468 auth_user.set_authenticated(authenticated)
468 469
469 470 return auth_user
470 471
471 472
472 473 class BaseController(WSGIController):
473 474
474 475 def __before__(self):
475 476 """
476 477 __before__ is called before controller methods and after __call__
477 478 """
478 479 # on each call propagate settings calls into global settings.
479 480 set_rhodecode_config(config)
480 481 attach_context_attributes(c, request, self._rhodecode_user.user_id)
481 482
482 483 # TODO: Remove this when fixed in attach_context_attributes()
483 484 c.repo_name = get_repo_slug(request) # can be empty
484 485
485 486 self.cut_off_limit_diff = safe_int(config.get('cut_off_limit_diff'))
486 487 self.cut_off_limit_file = safe_int(config.get('cut_off_limit_file'))
487 488 self.sa = meta.Session
488 489 self.scm_model = ScmModel(self.sa)
489 490
490 491 # set user language
491 492 user_lang = getattr(c.pyramid_request, '_LOCALE_', None)
492 493 if user_lang:
493 494 translation.set_lang(user_lang)
494 495 log.debug('set language to %s for user %s',
495 496 user_lang, self._rhodecode_user)
496 497
497 498 def _dispatch_redirect(self, with_url, environ, start_response):
498 499 resp = HTTPFound(with_url)
499 500 environ['SCRIPT_NAME'] = '' # handle prefix middleware
500 501 environ['PATH_INFO'] = with_url
501 502 return resp(environ, start_response)
502 503
503 504 def __call__(self, environ, start_response):
504 505 """Invoke the Controller"""
505 506 # WSGIController.__call__ dispatches to the Controller method
506 507 # the request is routed to. This routing information is
507 508 # available in environ['pylons.routes_dict']
508 509 from rhodecode.lib import helpers as h
509 510
510 511 # Provide the Pylons context to Pyramid's debugtoolbar if it asks
511 512 if environ.get('debugtoolbar.wants_pylons_context', False):
512 513 environ['debugtoolbar.pylons_context'] = c._current_obj()
513 514
514 515 _route_name = '.'.join([environ['pylons.routes_dict']['controller'],
515 516 environ['pylons.routes_dict']['action']])
516 517
517 518 self.rc_config = SettingsModel().get_all_settings(cache=True)
518 519 self.ip_addr = get_ip_addr(environ)
519 520
520 521 # The rhodecode auth user is looked up and passed through the
521 522 # environ by the pylons compatibility tween in pyramid.
522 523 # So we can just grab it from there.
523 524 auth_user = environ['rc_auth_user']
524 525
525 526 # set globals for auth user
526 527 request.user = auth_user
527 528 self._rhodecode_user = auth_user
528 529
529 530 log.info('IP: %s User: %s accessed %s [%s]' % (
530 531 self.ip_addr, auth_user, safe_unicode(get_access_path(environ)),
531 532 _route_name)
532 533 )
533 534
534 535 user_obj = auth_user.get_instance()
535 536 if user_obj and user_obj.user_data.get('force_password_change'):
536 537 h.flash('You are required to change your password', 'warning',
537 538 ignore_duplicate=True)
538 539 return self._dispatch_redirect(
539 540 url('my_account_password'), environ, start_response)
540 541
541 542 return WSGIController.__call__(self, environ, start_response)
542 543
543 544
544 545 class BaseRepoController(BaseController):
545 546 """
546 547 Base class for controllers responsible for loading all data needed for
547 548 a repository. Loaded items are:
548 549 
549 550 c.rhodecode_repo: instance of the scm repository
550 551 c.rhodecode_db_repo: instance of the db model
551 552 c.repository_requirements_missing: shows that repository specific data
552 553 could not be displayed due to missing requirements
553 554 c.repository_pull_requests: shows the number of open pull requests
554 555 """
555 556
556 557 def __before__(self):
557 558 super(BaseRepoController, self).__before__()
558 559 if c.repo_name: # extracted from routes
559 560 db_repo = Repository.get_by_repo_name(c.repo_name)
560 561 if not db_repo:
561 562 return
562 563
563 564 log.debug(
564 565 'Found repository in database %s with state `%s`',
565 566 safe_unicode(db_repo), safe_unicode(db_repo.repo_state))
566 567 route = getattr(request.environ.get('routes.route'), 'name', '')
567 568
568 569 # allow deleting repos that are somehow damaged on the filesystem
569 570 if route in ['delete_repo']:
570 571 return
571 572
572 573 if db_repo.repo_state in [Repository.STATE_PENDING]:
573 574 if route in ['repo_creating_home']:
574 575 return
575 576 check_url = url('repo_creating_home', repo_name=c.repo_name)
576 577 return redirect(check_url)
577 578
578 579 self.rhodecode_db_repo = db_repo
579 580
580 581 missing_requirements = False
581 582 try:
582 583 self.rhodecode_repo = self.rhodecode_db_repo.scm_instance()
583 584 except RepositoryRequirementError as e:
584 585 missing_requirements = True
585 586 self._handle_missing_requirements(e)
586 587
587 588 if self.rhodecode_repo is None and not missing_requirements:
588 589 log.error('%s this repository is present in database but it '
589 590 'cannot be created as an scm instance', c.repo_name)
590 591
591 592 h.flash(_(
592 593 "The repository at %(repo_name)s cannot be located.") %
593 594 {'repo_name': c.repo_name},
594 595 category='error', ignore_duplicate=True)
595 596 redirect(h.route_path('home'))
596 597
597 598 # update last change according to VCS data
598 599 if not missing_requirements:
599 600 commit = db_repo.get_commit(
600 601 pre_load=["author", "date", "message", "parents"])
601 602 db_repo.update_commit_cache(commit)
602 603
603 604 # Prepare context
604 605 c.rhodecode_db_repo = db_repo
605 606 c.rhodecode_repo = self.rhodecode_repo
606 607 c.repository_requirements_missing = missing_requirements
607 608
608 609 self._update_global_counters(self.scm_model, db_repo)
609 610
610 611 def _update_global_counters(self, scm_model, db_repo):
611 612 """
612 613 Base variables that are exposed on every page of the repository
613 614 """
614 615 c.repository_pull_requests = scm_model.get_pull_requests(db_repo)
615 616
616 617 def _handle_missing_requirements(self, error):
617 618 self.rhodecode_repo = None
618 619 log.error(
619 620 'Requirements are missing for repository %s: %s',
620 621 c.repo_name, error.message)
621 622
622 623 summary_url = h.route_path('repo_summary', repo_name=c.repo_name)
623 624 statistics_url = url('edit_repo_statistics', repo_name=c.repo_name)
624 625 settings_update_url = url('repo', repo_name=c.repo_name)
625 626 path = request.path
626 627 should_redirect = (
627 628 path not in (summary_url, settings_update_url)
628 629 and '/settings' not in path or path == statistics_url
629 630 )
630 631 if should_redirect:
631 632 redirect(summary_url)
@@ -1,4117 +1,4117 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37
38 38 from sqlalchemy import *
39 39 from sqlalchemy.ext.declarative import declared_attr
40 40 from sqlalchemy.ext.hybrid import hybrid_property
41 41 from sqlalchemy.orm import (
42 42 relationship, joinedload, class_mapper, validates, aliased)
43 43 from sqlalchemy.sql.expression import true
44 44 from beaker.cache import cache_region
45 45 from zope.cachedescriptors.property import Lazy as LazyProperty
46 46
47 47 from pylons.i18n.translation import lazy_ugettext as _
48 48 from pyramid.threadlocal import get_current_request
49 49
50 50 from rhodecode.lib.vcs import get_vcs_instance
51 51 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
52 52 from rhodecode.lib.utils2 import (
53 53 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
54 54 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
55 55 glob2re, StrictAttributeDict, cleaned_uri)
56 56 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType
57 57 from rhodecode.lib.ext_json import json
58 58 from rhodecode.lib.caching_query import FromCache
59 59 from rhodecode.lib.encrypt import AESCipher
60 60
61 61 from rhodecode.model.meta import Base, Session
62 62
63 63 URL_SEP = '/'
64 64 log = logging.getLogger(__name__)
65 65
66 66 # =============================================================================
67 67 # BASE CLASSES
68 68 # =============================================================================
69 69
70 70 # this is propagated from .ini file rhodecode.encrypted_values.secret or
71 71 # beaker.session.secret if first is not set.
72 72 # and initialized at environment.py
73 73 ENCRYPTION_KEY = None
74 74
75 75 # used to sort permissions by type; '#' is used here because it is not
76 76 # allowed in usernames and sorts very early in the string.printable table.
77 77 PERMISSION_TYPE_SORT = {
78 78 'admin': '####',
79 79 'write': '###',
80 80 'read': '##',
81 81 'none': '#',
82 82 }
83 83
84 84
85 85 def display_sort(obj):
86 86 """
87 87 Sort function used to sort permissions in the .permissions() function of
88 88 Repository, RepoGroup and UserGroup. It also puts the default user in front
89 89 of all other resources.
90 90 """
91 91
92 92 if obj.username == User.DEFAULT_USER:
93 93 return '#####'
94 94 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
95 95 return prefix + obj.username
96 96
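A usage sketch for display_sort(), assuming it and its module-level constants are in scope; the rows below are hypothetical stand-ins for real permission objects:

    from collections import namedtuple

    FakePerm = namedtuple('FakePerm', 'username permission')
    rows = [
        FakePerm('zoe', 'repository.read'),
        FakePerm('adam', 'repository.admin'),
        FakePerm('default', 'repository.read'),
    ]
    rows.sort(key=display_sort)
    # -> default user first, then adam (admin) before zoe (read)
    print([r.username for r in rows])
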
97 97
98 98 def _hash_key(k):
99 99 return md5_safe(k)
100 100
101 101
102 102 class EncryptedTextValue(TypeDecorator):
103 103 """
104 104 Special column for encrypted long text data, use like::
105 105
106 106 value = Column("encrypted_value", EncryptedValue(), nullable=False)
107 107
108 108 This column is intelligent: if the value is in unencrypted form it returns
109 109 the unencrypted form, but on save it always encrypts.
110 110 """
111 111 impl = Text
112 112
113 113 def process_bind_param(self, value, dialect):
114 114 if not value:
115 115 return value
116 116 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
117 117 # protect against double encryption if someone manually starts
118 118 # doing it
119 119 raise ValueError('value needs to be in unencrypted format, ie. '
120 120 'not starting with enc$aes')
121 121 return 'enc$aes_hmac$%s' % AESCipher(
122 122 ENCRYPTION_KEY, hmac=True).encrypt(value)
123 123
124 124 def process_result_value(self, value, dialect):
125 125 import rhodecode
126 126
127 127 if not value:
128 128 return value
129 129
130 130 parts = value.split('$', 3)
131 131 if not len(parts) == 3:
132 132 # probably not encrypted values
133 133 return value
134 134 else:
135 135 if parts[0] != 'enc':
136 136 # parts ok but without our header ?
137 137 return value
138 138 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
139 139 'rhodecode.encrypted_values.strict') or True)
140 140 # at that stage we know it's our encryption
141 141 if parts[1] == 'aes':
142 142 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
143 143 elif parts[1] == 'aes_hmac':
144 144 decrypted_data = AESCipher(
145 145 ENCRYPTION_KEY, hmac=True,
146 146 strict_verification=enc_strict_mode).decrypt(parts[2])
147 147 else:
148 148 raise ValueError(
149 149 'Encryption type part is wrong, must be `aes` '
150 150 'or `aes_hmac`, got `%s` instead' % (parts[1]))
151 151 return decrypted_data
152 152
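Values written through EncryptedTextValue are framed with an enc$<mode>$ prefix, which process_result_value() splits back apart before decrypting. A minimal sketch of that framing; the ciphertext is just a placeholder:

    stored = 'enc$aes_hmac$<base64-ciphertext>'
    parts = stored.split('$', 3)
    print(parts)  # ['enc', 'aes_hmac', '<base64-ciphertext>']
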
153 153
154 154 class BaseModel(object):
155 155 """
156 156 Base Model for all classes
157 157 """
158 158
159 159 @classmethod
160 160 def _get_keys(cls):
161 161 """return column names for this model """
162 162 return class_mapper(cls).c.keys()
163 163
164 164 def get_dict(self):
165 165 """
166 166 return dict with keys and values corresponding
167 167 to this model data """
168 168
169 169 d = {}
170 170 for k in self._get_keys():
171 171 d[k] = getattr(self, k)
172 172
173 173 # also use __json__() if present to get additional fields
174 174 _json_attr = getattr(self, '__json__', None)
175 175 if _json_attr:
176 176 # update with attributes from __json__
177 177 if callable(_json_attr):
178 178 _json_attr = _json_attr()
179 179 for k, val in _json_attr.iteritems():
180 180 d[k] = val
181 181 return d
182 182
183 183 def get_appstruct(self):
184 184 """return list with keys and values tuples corresponding
185 185 to this model data """
186 186
187 187 l = []
188 188 for k in self._get_keys():
189 189 l.append((k, getattr(self, k),))
190 190 return l
191 191
192 192 def populate_obj(self, populate_dict):
193 193 """populate model with data from given populate_dict"""
194 194
195 195 for k in self._get_keys():
196 196 if k in populate_dict:
197 197 setattr(self, k, populate_dict[k])
198 198
199 199 @classmethod
200 200 def query(cls):
201 201 return Session().query(cls)
202 202
203 203 @classmethod
204 204 def get(cls, id_):
205 205 if id_:
206 206 return cls.query().get(id_)
207 207
208 208 @classmethod
209 209 def get_or_404(cls, id_, pyramid_exc=False):
210 210 if pyramid_exc:
211 211 # NOTE(marcink): backward compat, once migration to pyramid
212 212 # this should only use pyramid exceptions
213 213 from pyramid.httpexceptions import HTTPNotFound
214 214 else:
215 215 from webob.exc import HTTPNotFound
216 216
217 217 try:
218 218 id_ = int(id_)
219 219 except (TypeError, ValueError):
220 220 raise HTTPNotFound
221 221
222 222 res = cls.query().get(id_)
223 223 if not res:
224 224 raise HTTPNotFound
225 225 return res
226 226
227 227 @classmethod
228 228 def getAll(cls):
229 229 # deprecated and left for backward compatibility
230 230 return cls.get_all()
231 231
232 232 @classmethod
233 233 def get_all(cls):
234 234 return cls.query().all()
235 235
236 236 @classmethod
237 237 def delete(cls, id_):
238 238 obj = cls.query().get(id_)
239 239 Session().delete(obj)
240 240
241 241 @classmethod
242 242 def identity_cache(cls, session, attr_name, value):
243 243 exist_in_session = []
244 244 for (item_cls, pkey), instance in session.identity_map.items():
245 245 if cls == item_cls and getattr(instance, attr_name) == value:
246 246 exist_in_session.append(instance)
247 247 if exist_in_session:
248 248 if len(exist_in_session) == 1:
249 249 return exist_in_session[0]
250 250 log.exception(
251 251 'multiple objects with attr %s and '
252 252 'value %s found with same name: %r',
253 253 attr_name, value, exist_in_session)
254 254
255 255 def __repr__(self):
256 256 if hasattr(self, '__unicode__'):
257 257 # python repr needs to return str
258 258 try:
259 259 return safe_str(self.__unicode__())
260 260 except UnicodeDecodeError:
261 261 pass
262 262 return '<DB:%s>' % (self.__class__.__name__)
263 263
264 264
265 265 class RhodeCodeSetting(Base, BaseModel):
266 266 __tablename__ = 'rhodecode_settings'
267 267 __table_args__ = (
268 268 UniqueConstraint('app_settings_name'),
269 269 {'extend_existing': True, 'mysql_engine': 'InnoDB',
270 270 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
271 271 )
272 272
273 273 SETTINGS_TYPES = {
274 274 'str': safe_str,
275 275 'int': safe_int,
276 276 'unicode': safe_unicode,
277 277 'bool': str2bool,
278 278 'list': functools.partial(aslist, sep=',')
279 279 }
280 280 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
281 281 GLOBAL_CONF_KEY = 'app_settings'
282 282
283 283 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
284 284 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
285 285 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
286 286 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
287 287
288 288 def __init__(self, key='', val='', type='unicode'):
289 289 self.app_settings_name = key
290 290 self.app_settings_type = type
291 291 self.app_settings_value = val
292 292
293 293 @validates('_app_settings_value')
294 294 def validate_settings_value(self, key, val):
295 295 assert type(val) == unicode
296 296 return val
297 297
298 298 @hybrid_property
299 299 def app_settings_value(self):
300 300 v = self._app_settings_value
301 301 _type = self.app_settings_type
302 302 if _type:
303 303 _type = self.app_settings_type.split('.')[0]
304 304 # decode the encrypted value
305 305 if 'encrypted' in self.app_settings_type:
306 306 cipher = EncryptedTextValue()
307 307 v = safe_unicode(cipher.process_result_value(v, None))
308 308
309 309 converter = self.SETTINGS_TYPES.get(_type) or \
310 310 self.SETTINGS_TYPES['unicode']
311 311 return converter(v)
312 312
313 313 @app_settings_value.setter
314 314 def app_settings_value(self, val):
315 315 """
316 316 Setter that will always make sure we use unicode in app_settings_value
317 317
318 318 :param val:
319 319 """
320 320 val = safe_unicode(val)
321 321 # encode the encrypted value
322 322 if 'encrypted' in self.app_settings_type:
323 323 cipher = EncryptedTextValue()
324 324 val = safe_unicode(cipher.process_bind_param(val, None))
325 325 self._app_settings_value = val
326 326
327 327 @hybrid_property
328 328 def app_settings_type(self):
329 329 return self._app_settings_type
330 330
331 331 @app_settings_type.setter
332 332 def app_settings_type(self, val):
333 333 if val.split('.')[0] not in self.SETTINGS_TYPES:
334 334 raise Exception('type must be one of %s got %s'
335 335 % (self.SETTINGS_TYPES.keys(), val))
336 336 self._app_settings_type = val
337 337
338 338 def __unicode__(self):
339 339 return u"<%s('%s:%s[%s]')>" % (
340 340 self.__class__.__name__,
341 341 self.app_settings_name, self.app_settings_value,
342 342 self.app_settings_type
343 343 )
344 344
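Each RhodeCodeSetting row stores its value as a string together with a type tag, and the matching converter from SETTINGS_TYPES is applied on read. A rough sketch, assuming RhodeCodeSetting is importable from rhodecode.model.db; the setting names and values are hypothetical:

    items = RhodeCodeSetting('dashboard_items', '100', 'int')
    print(items.app_settings_value)  # 100, converted by safe_int

    types = RhodeCodeSetting('supported_types', 'hg,git,svn', 'list')
    print(types.app_settings_value)  # ['hg', 'git', 'svn'], converted by aslist
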
345 345
346 346 class RhodeCodeUi(Base, BaseModel):
347 347 __tablename__ = 'rhodecode_ui'
348 348 __table_args__ = (
349 349 UniqueConstraint('ui_key'),
350 350 {'extend_existing': True, 'mysql_engine': 'InnoDB',
351 351 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
352 352 )
353 353
354 354 HOOK_REPO_SIZE = 'changegroup.repo_size'
355 355 # HG
356 356 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
357 357 HOOK_PULL = 'outgoing.pull_logger'
358 358 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
359 359 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
360 360 HOOK_PUSH = 'changegroup.push_logger'
361 361 HOOK_PUSH_KEY = 'pushkey.key_push'
362 362
363 363 # TODO: johbo: Unify way how hooks are configured for git and hg,
364 364 # git part is currently hardcoded.
365 365
366 366 # SVN PATTERNS
367 367 SVN_BRANCH_ID = 'vcs_svn_branch'
368 368 SVN_TAG_ID = 'vcs_svn_tag'
369 369
370 370 ui_id = Column(
371 371 "ui_id", Integer(), nullable=False, unique=True, default=None,
372 372 primary_key=True)
373 373 ui_section = Column(
374 374 "ui_section", String(255), nullable=True, unique=None, default=None)
375 375 ui_key = Column(
376 376 "ui_key", String(255), nullable=True, unique=None, default=None)
377 377 ui_value = Column(
378 378 "ui_value", String(255), nullable=True, unique=None, default=None)
379 379 ui_active = Column(
380 380 "ui_active", Boolean(), nullable=True, unique=None, default=True)
381 381
382 382 def __repr__(self):
383 383 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
384 384 self.ui_key, self.ui_value)
385 385
386 386
387 387 class RepoRhodeCodeSetting(Base, BaseModel):
388 388 __tablename__ = 'repo_rhodecode_settings'
389 389 __table_args__ = (
390 390 UniqueConstraint(
391 391 'app_settings_name', 'repository_id',
392 392 name='uq_repo_rhodecode_setting_name_repo_id'),
393 393 {'extend_existing': True, 'mysql_engine': 'InnoDB',
394 394 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
395 395 )
396 396
397 397 repository_id = Column(
398 398 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
399 399 nullable=False)
400 400 app_settings_id = Column(
401 401 "app_settings_id", Integer(), nullable=False, unique=True,
402 402 default=None, primary_key=True)
403 403 app_settings_name = Column(
404 404 "app_settings_name", String(255), nullable=True, unique=None,
405 405 default=None)
406 406 _app_settings_value = Column(
407 407 "app_settings_value", String(4096), nullable=True, unique=None,
408 408 default=None)
409 409 _app_settings_type = Column(
410 410 "app_settings_type", String(255), nullable=True, unique=None,
411 411 default=None)
412 412
413 413 repository = relationship('Repository')
414 414
415 415 def __init__(self, repository_id, key='', val='', type='unicode'):
416 416 self.repository_id = repository_id
417 417 self.app_settings_name = key
418 418 self.app_settings_type = type
419 419 self.app_settings_value = val
420 420
421 421 @validates('_app_settings_value')
422 422 def validate_settings_value(self, key, val):
423 423 assert type(val) == unicode
424 424 return val
425 425
426 426 @hybrid_property
427 427 def app_settings_value(self):
428 428 v = self._app_settings_value
429 429 type_ = self.app_settings_type
430 430 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
431 431 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
432 432 return converter(v)
433 433
434 434 @app_settings_value.setter
435 435 def app_settings_value(self, val):
436 436 """
437 437 Setter that will always make sure we use unicode in app_settings_value
438 438
439 439 :param val:
440 440 """
441 441 self._app_settings_value = safe_unicode(val)
442 442
443 443 @hybrid_property
444 444 def app_settings_type(self):
445 445 return self._app_settings_type
446 446
447 447 @app_settings_type.setter
448 448 def app_settings_type(self, val):
449 449 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
450 450 if val not in SETTINGS_TYPES:
451 451 raise Exception('type must be one of %s got %s'
452 452 % (SETTINGS_TYPES.keys(), val))
453 453 self._app_settings_type = val
454 454
455 455 def __unicode__(self):
456 456 return u"<%s('%s:%s:%s[%s]')>" % (
457 457 self.__class__.__name__, self.repository.repo_name,
458 458 self.app_settings_name, self.app_settings_value,
459 459 self.app_settings_type
460 460 )
461 461
462 462
463 463 class RepoRhodeCodeUi(Base, BaseModel):
464 464 __tablename__ = 'repo_rhodecode_ui'
465 465 __table_args__ = (
466 466 UniqueConstraint(
467 467 'repository_id', 'ui_section', 'ui_key',
468 468 name='uq_repo_rhodecode_ui_repository_id_section_key'),
469 469 {'extend_existing': True, 'mysql_engine': 'InnoDB',
470 470 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
471 471 )
472 472
473 473 repository_id = Column(
474 474 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
475 475 nullable=False)
476 476 ui_id = Column(
477 477 "ui_id", Integer(), nullable=False, unique=True, default=None,
478 478 primary_key=True)
479 479 ui_section = Column(
480 480 "ui_section", String(255), nullable=True, unique=None, default=None)
481 481 ui_key = Column(
482 482 "ui_key", String(255), nullable=True, unique=None, default=None)
483 483 ui_value = Column(
484 484 "ui_value", String(255), nullable=True, unique=None, default=None)
485 485 ui_active = Column(
486 486 "ui_active", Boolean(), nullable=True, unique=None, default=True)
487 487
488 488 repository = relationship('Repository')
489 489
490 490 def __repr__(self):
491 491 return '<%s[%s:%s]%s=>%s]>' % (
492 492 self.__class__.__name__, self.repository.repo_name,
493 493 self.ui_section, self.ui_key, self.ui_value)
494 494
495 495
496 496 class User(Base, BaseModel):
497 497 __tablename__ = 'users'
498 498 __table_args__ = (
499 499 UniqueConstraint('username'), UniqueConstraint('email'),
500 500 Index('u_username_idx', 'username'),
501 501 Index('u_email_idx', 'email'),
502 502 {'extend_existing': True, 'mysql_engine': 'InnoDB',
503 503 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
504 504 )
505 505 DEFAULT_USER = 'default'
506 506 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
507 507 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
508 508
509 509 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
510 510 username = Column("username", String(255), nullable=True, unique=None, default=None)
511 511 password = Column("password", String(255), nullable=True, unique=None, default=None)
512 512 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
513 513 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
514 514 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
515 515 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
516 516 _email = Column("email", String(255), nullable=True, unique=None, default=None)
517 517 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
518 518 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
519 519
520 520 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
521 521 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
522 522 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
523 523 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
524 524 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
525 525 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
526 526
527 527 user_log = relationship('UserLog')
528 528 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
529 529
530 530 repositories = relationship('Repository')
531 531 repository_groups = relationship('RepoGroup')
532 532 user_groups = relationship('UserGroup')
533 533
534 534 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
535 535 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
536 536
537 537 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
538 538 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
539 539 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
540 540
541 541 group_member = relationship('UserGroupMember', cascade='all')
542 542
543 543 notifications = relationship('UserNotification', cascade='all')
544 544 # notifications assigned to this user
545 545 user_created_notifications = relationship('Notification', cascade='all')
546 546 # comments created by this user
547 547 user_comments = relationship('ChangesetComment', cascade='all')
548 548 # user profile extra info
549 549 user_emails = relationship('UserEmailMap', cascade='all')
550 550 user_ip_map = relationship('UserIpMap', cascade='all')
551 551 user_auth_tokens = relationship('UserApiKeys', cascade='all')
552 552 # gists
553 553 user_gists = relationship('Gist', cascade='all')
554 554 # user pull requests
555 555 user_pull_requests = relationship('PullRequest', cascade='all')
556 556 # external identities
557 557     external_identities = relationship(
558 558 'ExternalIdentity',
559 559 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
560 560 cascade='all')
561 561
562 562 def __unicode__(self):
563 563 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
564 564 self.user_id, self.username)
565 565
566 566 @hybrid_property
567 567 def email(self):
568 568 return self._email
569 569
570 570 @email.setter
571 571 def email(self, val):
572 572 self._email = val.lower() if val else None
573 573
574 574 @hybrid_property
575 575 def first_name(self):
576 576 from rhodecode.lib import helpers as h
577 577 if self.name:
578 578 return h.escape(self.name)
579 579 return self.name
580 580
581 581 @hybrid_property
582 582 def last_name(self):
583 583 from rhodecode.lib import helpers as h
584 584 if self.lastname:
585 585 return h.escape(self.lastname)
586 586 return self.lastname
587 587
588 588 @hybrid_property
589 589 def api_key(self):
590 590 """
591 591         Fetch the auth-token with role ALL connected to this user, if one exists
592 592 """
593 593 user_auth_token = UserApiKeys.query()\
594 594 .filter(UserApiKeys.user_id == self.user_id)\
595 595 .filter(or_(UserApiKeys.expires == -1,
596 596 UserApiKeys.expires >= time.time()))\
597 597 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
598 598 if user_auth_token:
599 599 user_auth_token = user_auth_token.api_key
600 600
601 601 return user_auth_token
602 602
603 603 @api_key.setter
604 604 def api_key(self, val):
605 605         # don't allow setting the API key; this is deprecated for now
606 606 self._api_key = None
607 607
608 608 @property
609 609 def firstname(self):
610 610 # alias for future
611 611 return self.name
612 612
613 613 @property
614 614 def emails(self):
615 615 other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
616 616 return [self.email] + [x.email for x in other]
617 617
618 618 @property
619 619 def auth_tokens(self):
620 620 return [x.api_key for x in self.extra_auth_tokens]
621 621
622 622 @property
623 623 def extra_auth_tokens(self):
624 624 return UserApiKeys.query().filter(UserApiKeys.user == self).all()
625 625
626 626 @property
627 627 def feed_token(self):
628 628 return self.get_feed_token()
629 629
630 630 def get_feed_token(self):
631 631 feed_tokens = UserApiKeys.query()\
632 632 .filter(UserApiKeys.user == self)\
633 633 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
634 634 .all()
635 635 if feed_tokens:
636 636 return feed_tokens[0].api_key
637 637 return 'NO_FEED_TOKEN_AVAILABLE'
638 638
639 639 @classmethod
640 640 def extra_valid_auth_tokens(cls, user, role=None):
641 641 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
642 642 .filter(or_(UserApiKeys.expires == -1,
643 643 UserApiKeys.expires >= time.time()))
644 644 if role:
645 645 tokens = tokens.filter(or_(UserApiKeys.role == role,
646 646 UserApiKeys.role == UserApiKeys.ROLE_ALL))
647 647 return tokens.all()
648 648
649 649 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
650 650 from rhodecode.lib import auth
651 651
652 652 log.debug('Trying to authenticate user: %s via auth-token, '
653 653 'and roles: %s', self, roles)
654 654
655 655 if not auth_token:
656 656 return False
657 657
658 658 crypto_backend = auth.crypto_backend()
659 659
660 660 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
661 661 tokens_q = UserApiKeys.query()\
662 662 .filter(UserApiKeys.user_id == self.user_id)\
663 663 .filter(or_(UserApiKeys.expires == -1,
664 664 UserApiKeys.expires >= time.time()))
665 665
666 666 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
667 667
668 668 plain_tokens = []
669 669 hash_tokens = []
670 670
671 671 for token in tokens_q.all():
672 672 # verify scope first
673 673 if token.repo_id:
674 674 # token has a scope, we need to verify it
675 675 if scope_repo_id != token.repo_id:
676 676 log.debug(
677 677 'Scope mismatch: token has a set repo scope: %s, '
678 678 'and calling scope is:%s, skipping further checks',
679 679 token.repo, scope_repo_id)
680 680 # token has a scope, and it doesn't match, skip token
681 681 continue
682 682
683 683 if token.api_key.startswith(crypto_backend.ENC_PREF):
684 684 hash_tokens.append(token.api_key)
685 685 else:
686 686 plain_tokens.append(token.api_key)
687 687
688 688 is_plain_match = auth_token in plain_tokens
689 689 if is_plain_match:
690 690 return True
691 691
692 692 for hashed in hash_tokens:
693 693 # TODO(marcink): this is expensive to calculate, but most secure
694 694 match = crypto_backend.hash_check(auth_token, hashed)
695 695 if match:
696 696 return True
697 697
698 698 return False
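
    # Illustrative usage sketch (hypothetical token value): expired and
    # wrongly-scoped tokens are filtered out first, then plain-text tokens are
    # checked before the more expensive hashed ones:
    #
    #   ok = user.authenticate_by_token(
    #       'secret-token', roles=[UserApiKeys.ROLE_API], scope_repo_id=None)
    #
    # A repo-scoped token only matches when scope_repo_id equals the token's
    # repo_id; unscoped tokens match regardless of the calling scope.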
699 699
700 700 @property
701 701 def ip_addresses(self):
702 702 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
703 703 return [x.ip_addr for x in ret]
704 704
705 705 @property
706 706 def username_and_name(self):
707 707 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
708 708
709 709 @property
710 710 def username_or_name_or_email(self):
711 711         full_name = self.full_name if self.full_name != ' ' else None
712 712 return self.username or full_name or self.email
713 713
714 714 @property
715 715 def full_name(self):
716 716 return '%s %s' % (self.first_name, self.last_name)
717 717
718 718 @property
719 719 def full_name_or_username(self):
720 720 return ('%s %s' % (self.first_name, self.last_name)
721 721 if (self.first_name and self.last_name) else self.username)
722 722
723 723 @property
724 724 def full_contact(self):
725 725 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
726 726
727 727 @property
728 728 def short_contact(self):
729 729 return '%s %s' % (self.first_name, self.last_name)
730 730
731 731 @property
732 732 def is_admin(self):
733 733 return self.admin
734 734
735 735 @property
736 736 def AuthUser(self):
737 737 """
738 738 Returns instance of AuthUser for this user
739 739 """
740 740 from rhodecode.lib.auth import AuthUser
741 741 return AuthUser(user_id=self.user_id, username=self.username)
742 742
743 743 @hybrid_property
744 744 def user_data(self):
745 745 if not self._user_data:
746 746 return {}
747 747
748 748 try:
749 749 return json.loads(self._user_data)
750 750 except TypeError:
751 751 return {}
752 752
753 753 @user_data.setter
754 754 def user_data(self, val):
755 755 if not isinstance(val, dict):
756 756 raise Exception('user_data must be dict, got %s' % type(val))
757 757 try:
758 758 self._user_data = json.dumps(val)
759 759 except Exception:
760 760 log.error(traceback.format_exc())
761 761
762 762 @classmethod
763 763 def get_by_username(cls, username, case_insensitive=False,
764 764 cache=False, identity_cache=False):
765 765 session = Session()
766 766
767 767 if case_insensitive:
768 768 q = cls.query().filter(
769 769 func.lower(cls.username) == func.lower(username))
770 770 else:
771 771 q = cls.query().filter(cls.username == username)
772 772
773 773 if cache:
774 774 if identity_cache:
775 775 val = cls.identity_cache(session, 'username', username)
776 776 if val:
777 777 return val
778 778 else:
779 779 cache_key = "get_user_by_name_%s" % _hash_key(username)
780 780 q = q.options(
781 781 FromCache("sql_cache_short", cache_key))
782 782
783 783 return q.scalar()
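
    # Illustrative note (sketch): with cache=True the lookup result is cached;
    # identity_cache=True uses the identity-map helper (cls.identity_cache),
    # otherwise the "sql_cache_short" query-cache region keyed by the hashed
    # username is used, e.g.:
    #
    #   User.get_by_username('admin', case_insensitive=True, cache=True)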
784 784
785 785 @classmethod
786 786 def get_by_auth_token(cls, auth_token, cache=False):
787 787 q = UserApiKeys.query()\
788 788 .filter(UserApiKeys.api_key == auth_token)\
789 789 .filter(or_(UserApiKeys.expires == -1,
790 790 UserApiKeys.expires >= time.time()))
791 791 if cache:
792 792 q = q.options(
793 793 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
794 794
795 795 match = q.first()
796 796 if match:
797 797 return match.user
798 798
799 799 @classmethod
800 800 def get_by_email(cls, email, case_insensitive=False, cache=False):
801 801
802 802 if case_insensitive:
803 803 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
804 804
805 805 else:
806 806 q = cls.query().filter(cls.email == email)
807 807
808 808 email_key = _hash_key(email)
809 809 if cache:
810 810 q = q.options(
811 811 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
812 812
813 813 ret = q.scalar()
814 814 if ret is None:
815 815 q = UserEmailMap.query()
816 816 # try fetching in alternate email map
817 817 if case_insensitive:
818 818 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
819 819 else:
820 820 q = q.filter(UserEmailMap.email == email)
821 821 q = q.options(joinedload(UserEmailMap.user))
822 822 if cache:
823 823 q = q.options(
824 824 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
825 825 ret = getattr(q.scalar(), 'user', None)
826 826
827 827 return ret
828 828
829 829 @classmethod
830 830 def get_from_cs_author(cls, author):
831 831 """
832 832         Tries to get a User object out of a commit author string
833 833
834 834 :param author:
835 835 """
836 836 from rhodecode.lib.helpers import email, author_name
837 837         # if a valid email is in the passed attribute, see if it belongs to a user in the system
838 838 _email = email(author)
839 839 if _email:
840 840 user = cls.get_by_email(_email, case_insensitive=True)
841 841 if user:
842 842 return user
843 843 # Maybe we can match by username?
844 844 _author = author_name(author)
845 845 user = cls.get_by_username(_author, case_insensitive=True)
846 846 if user:
847 847 return user
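
    # Illustrative sketch (hypothetical author string): for an author such as
    # u'Joe Doe <joe@example.com>', the email part is tried first via
    # get_by_email(), then the name part via get_by_username(); None is
    # returned implicitly when neither matches.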
848 848
849 849 def update_userdata(self, **kwargs):
850 850 usr = self
851 851 old = usr.user_data
852 852 old.update(**kwargs)
853 853 usr.user_data = old
854 854 Session().add(usr)
855 855         log.debug('updated userdata with %s', kwargs)
856 856
857 857 def update_lastlogin(self):
858 858 """Update user lastlogin"""
859 859 self.last_login = datetime.datetime.now()
860 860 Session().add(self)
861 861 log.debug('updated user %s lastlogin', self.username)
862 862
863 863 def update_lastactivity(self):
864 864 """Update user lastactivity"""
865 865 self.last_activity = datetime.datetime.now()
866 866 Session().add(self)
867 867 log.debug('updated user %s lastactivity', self.username)
868 868
869 869 def update_password(self, new_password):
870 870 from rhodecode.lib.auth import get_crypt_password
871 871
872 872 self.password = get_crypt_password(new_password)
873 873 Session().add(self)
874 874
875 875 @classmethod
876 876 def get_first_super_admin(cls):
877 877 user = User.query().filter(User.admin == true()).first()
878 878 if user is None:
879 879 raise Exception('FATAL: Missing administrative account!')
880 880 return user
881 881
882 882 @classmethod
883 883 def get_all_super_admins(cls):
884 884 """
885 885 Returns all admin accounts sorted by username
886 886 """
887 887 return User.query().filter(User.admin == true())\
888 888 .order_by(User.username.asc()).all()
889 889
890 890 @classmethod
891 891 def get_default_user(cls, cache=False, refresh=False):
892 892 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
893 893 if user is None:
894 894 raise Exception('FATAL: Missing default account!')
895 895 if refresh:
896 896 # The default user might be based on outdated state which
897 897 # has been loaded from the cache.
898 898 # A call to refresh() ensures that the
899 899 # latest state from the database is used.
900 900 Session().refresh(user)
901 901 return user
902 902
903 903 def _get_default_perms(self, user, suffix=''):
904 904 from rhodecode.model.permission import PermissionModel
905 905 return PermissionModel().get_default_perms(user.user_perms, suffix)
906 906
907 907 def get_default_perms(self, suffix=''):
908 908 return self._get_default_perms(self, suffix)
909 909
910 910 def get_api_data(self, include_secrets=False, details='full'):
911 911 """
912 912 Common function for generating user related data for API
913 913
914 914 :param include_secrets: By default secrets in the API data will be replaced
915 915 by a placeholder value to prevent exposing this data by accident. In case
916 916 this data shall be exposed, set this flag to ``True``.
917 917
918 918         :param details: either 'basic' or 'full'; 'basic' gives only a subset of
919 919           the available user information: user_id, names and emails.
920 920 """
921 921 user = self
922 922 user_data = self.user_data
923 923 data = {
924 924 'user_id': user.user_id,
925 925 'username': user.username,
926 926 'firstname': user.name,
927 927 'lastname': user.lastname,
928 928 'email': user.email,
929 929 'emails': user.emails,
930 930 }
931 931 if details == 'basic':
932 932 return data
933 933
934 934 api_key_length = 40
935 935 api_key_replacement = '*' * api_key_length
936 936
937 937 extras = {
938 938 'api_keys': [api_key_replacement],
939 939 'auth_tokens': [api_key_replacement],
940 940 'active': user.active,
941 941 'admin': user.admin,
942 942 'extern_type': user.extern_type,
943 943 'extern_name': user.extern_name,
944 944 'last_login': user.last_login,
945 945 'last_activity': user.last_activity,
946 946 'ip_addresses': user.ip_addresses,
947 947 'language': user_data.get('language')
948 948 }
949 949 data.update(extras)
950 950
951 951 if include_secrets:
952 952 data['api_keys'] = user.auth_tokens
953 953 data['auth_tokens'] = user.extra_auth_tokens
954 954 return data
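
    # Illustrative usage sketch (hypothetical calls); secrets are masked with a
    # placeholder unless explicitly requested:
    #
    #   user.get_api_data(details='basic')
    #   # -> only user_id, username, firstname, lastname, email, emails
    #   user.get_api_data(include_secrets=True)
    #   # -> full data with the real auth tokens instead of '****' placeholders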
955 955
956 956 def __json__(self):
957 957 data = {
958 958 'full_name': self.full_name,
959 959 'full_name_or_username': self.full_name_or_username,
960 960 'short_contact': self.short_contact,
961 961 'full_contact': self.full_contact,
962 962 }
963 963 data.update(self.get_api_data())
964 964 return data
965 965
966 966
967 967 class UserApiKeys(Base, BaseModel):
968 968 __tablename__ = 'user_api_keys'
969 969 __table_args__ = (
970 970 Index('uak_api_key_idx', 'api_key'),
971 971 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
972 972 UniqueConstraint('api_key'),
973 973 {'extend_existing': True, 'mysql_engine': 'InnoDB',
974 974 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
975 975 )
976 976 __mapper_args__ = {}
977 977
978 978 # ApiKey role
979 979 ROLE_ALL = 'token_role_all'
980 980 ROLE_HTTP = 'token_role_http'
981 981 ROLE_VCS = 'token_role_vcs'
982 982 ROLE_API = 'token_role_api'
983 983 ROLE_FEED = 'token_role_feed'
984 984 ROLE_PASSWORD_RESET = 'token_password_reset'
985 985
986 986 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
987 987
988 988 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
989 989 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
990 990 api_key = Column("api_key", String(255), nullable=False, unique=True)
991 991 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
992 992 expires = Column('expires', Float(53), nullable=False)
993 993 role = Column('role', String(255), nullable=True)
994 994 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
995 995
996 996 # scope columns
997 997 repo_id = Column(
998 998 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
999 999 nullable=True, unique=None, default=None)
1000 1000 repo = relationship('Repository', lazy='joined')
1001 1001
1002 1002 repo_group_id = Column(
1003 1003 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1004 1004 nullable=True, unique=None, default=None)
1005 1005 repo_group = relationship('RepoGroup', lazy='joined')
1006 1006
1007 1007 user = relationship('User', lazy='joined')
1008 1008
1009 1009 def __unicode__(self):
1010 1010 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1011 1011
1012 1012 def __json__(self):
1013 1013 data = {
1014 1014 'auth_token': self.api_key,
1015 1015 'role': self.role,
1016 1016 'scope': self.scope_humanized,
1017 1017 'expired': self.expired
1018 1018 }
1019 1019 return data
1020 1020
1021 1021 def get_api_data(self, include_secrets=False):
1022 1022 data = self.__json__()
1023 1023 if include_secrets:
1024 1024 return data
1025 1025 else:
1026 1026 data['auth_token'] = self.token_obfuscated
1027 1027 return data
1028 1028
1029 1029 @hybrid_property
1030 1030 def description_safe(self):
1031 1031 from rhodecode.lib import helpers as h
1032 1032 return h.escape(self.description)
1033 1033
1034 1034 @property
1035 1035 def expired(self):
1036 1036 if self.expires == -1:
1037 1037 return False
1038 1038 return time.time() > self.expires
1039 1039
1040 1040 @classmethod
1041 1041 def _get_role_name(cls, role):
1042 1042 return {
1043 1043 cls.ROLE_ALL: _('all'),
1044 1044 cls.ROLE_HTTP: _('http/web interface'),
1045 1045 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1046 1046 cls.ROLE_API: _('api calls'),
1047 1047 cls.ROLE_FEED: _('feed access'),
1048 1048 }.get(role, role)
1049 1049
1050 1050 @property
1051 1051 def role_humanized(self):
1052 1052 return self._get_role_name(self.role)
1053 1053
1054 1054 def _get_scope(self):
1055 1055 if self.repo:
1056 1056 return repr(self.repo)
1057 1057 if self.repo_group:
1058 1058 return repr(self.repo_group) + ' (recursive)'
1059 1059 return 'global'
1060 1060
1061 1061 @property
1062 1062 def scope_humanized(self):
1063 1063 return self._get_scope()
1064 1064
1065 1065 @property
1066 1066 def token_obfuscated(self):
1067 1067 if self.api_key:
1068 1068 return self.api_key[:4] + "****"
1069 1069
1070 1070
1071 1071 class UserEmailMap(Base, BaseModel):
1072 1072 __tablename__ = 'user_email_map'
1073 1073 __table_args__ = (
1074 1074 Index('uem_email_idx', 'email'),
1075 1075 UniqueConstraint('email'),
1076 1076 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1077 1077 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1078 1078 )
1079 1079 __mapper_args__ = {}
1080 1080
1081 1081 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1082 1082 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1083 1083 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1084 1084 user = relationship('User', lazy='joined')
1085 1085
1086 1086 @validates('_email')
1087 1087 def validate_email(self, key, email):
1088 1088         # check that this email is not the user's main one
1089 1089 main_email = Session().query(User).filter(User.email == email).scalar()
1090 1090 if main_email is not None:
1091 1091             raise AttributeError('email %s is already present in the user table' % email)
1092 1092 return email
1093 1093
1094 1094 @hybrid_property
1095 1095 def email(self):
1096 1096 return self._email
1097 1097
1098 1098 @email.setter
1099 1099 def email(self, val):
1100 1100 self._email = val.lower() if val else None
1101 1101
1102 1102
1103 1103 class UserIpMap(Base, BaseModel):
1104 1104 __tablename__ = 'user_ip_map'
1105 1105 __table_args__ = (
1106 1106 UniqueConstraint('user_id', 'ip_addr'),
1107 1107 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1108 1108 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1109 1109 )
1110 1110 __mapper_args__ = {}
1111 1111
1112 1112 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1113 1113 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1114 1114 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1115 1115 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1116 1116 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1117 1117 user = relationship('User', lazy='joined')
1118 1118
1119 1119 @hybrid_property
1120 1120 def description_safe(self):
1121 1121 from rhodecode.lib import helpers as h
1122 1122 return h.escape(self.description)
1123 1123
1124 1124 @classmethod
1125 1125 def _get_ip_range(cls, ip_addr):
1126 net = ipaddress.ip_network(ip_addr, strict=False)
1126 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1127 1127 return [str(net.network_address), str(net.broadcast_address)]
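
    # Illustrative sketch (hypothetical address); safe_unicode() is used above
    # because the ipaddress backport on Python 2 expects unicode input:
    #
    #   UserIpMap._get_ip_range('192.168.1.0/24')
    #   # -> ['192.168.1.0', '192.168.1.255']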
1128 1128
1129 1129 def __json__(self):
1130 1130 return {
1131 1131 'ip_addr': self.ip_addr,
1132 1132 'ip_range': self._get_ip_range(self.ip_addr),
1133 1133 }
1134 1134
1135 1135 def __unicode__(self):
1136 1136 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1137 1137 self.user_id, self.ip_addr)
1138 1138
1139 1139
1140 1140 class UserLog(Base, BaseModel):
1141 1141 __tablename__ = 'user_logs'
1142 1142 __table_args__ = (
1143 1143 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1144 1144 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1145 1145 )
1146 1146 VERSION_1 = 'v1'
1147 1147 VERSION_2 = 'v2'
1148 1148 VERSIONS = [VERSION_1, VERSION_2]
1149 1149
1150 1150 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1151 1151 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1152 1152 username = Column("username", String(255), nullable=True, unique=None, default=None)
1153 1153 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
1154 1154 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1155 1155 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1156 1156 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1157 1157 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1158 1158
1159 1159 version = Column("version", String(255), nullable=True, default=VERSION_1)
1160 1160 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
1161 1161 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
1162 1162
1163 1163 def __unicode__(self):
1164 1164 return u"<%s('id:%s:%s')>" % (
1165 1165 self.__class__.__name__, self.repository_name, self.action)
1166 1166
1167 1167 def __json__(self):
1168 1168 return {
1169 1169 'user_id': self.user_id,
1170 1170 'username': self.username,
1171 1171 'repository_id': self.repository_id,
1172 1172 'repository_name': self.repository_name,
1173 1173 'user_ip': self.user_ip,
1174 1174 'action_date': self.action_date,
1175 1175 'action': self.action,
1176 1176 }
1177 1177
1178 1178 @property
1179 1179 def action_as_day(self):
1180 1180 return datetime.date(*self.action_date.timetuple()[:3])
1181 1181
1182 1182 user = relationship('User')
1183 1183 repository = relationship('Repository', cascade='')
1184 1184
1185 1185
1186 1186 class UserGroup(Base, BaseModel):
1187 1187 __tablename__ = 'users_groups'
1188 1188 __table_args__ = (
1189 1189 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1190 1190 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1191 1191 )
1192 1192
1193 1193 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1194 1194 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1195 1195 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1196 1196 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1197 1197 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1198 1198 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1199 1199 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1200 1200 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1201 1201
1202 1202 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1203 1203 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1204 1204 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1205 1205 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1206 1206 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1207 1207 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1208 1208
1209 1209 user = relationship('User')
1210 1210
1211 1211 @hybrid_property
1212 1212 def description_safe(self):
1213 1213 from rhodecode.lib import helpers as h
1214 1214 return h.escape(self.description)
1215 1215
1216 1216 @hybrid_property
1217 1217 def group_data(self):
1218 1218 if not self._group_data:
1219 1219 return {}
1220 1220
1221 1221 try:
1222 1222 return json.loads(self._group_data)
1223 1223 except TypeError:
1224 1224 return {}
1225 1225
1226 1226 @group_data.setter
1227 1227 def group_data(self, val):
1228 1228 try:
1229 1229 self._group_data = json.dumps(val)
1230 1230 except Exception:
1231 1231 log.error(traceback.format_exc())
1232 1232
1233 1233 def __unicode__(self):
1234 1234 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1235 1235 self.users_group_id,
1236 1236 self.users_group_name)
1237 1237
1238 1238 @classmethod
1239 1239 def get_by_group_name(cls, group_name, cache=False,
1240 1240 case_insensitive=False):
1241 1241 if case_insensitive:
1242 1242 q = cls.query().filter(func.lower(cls.users_group_name) ==
1243 1243 func.lower(group_name))
1244 1244
1245 1245 else:
1246 1246 q = cls.query().filter(cls.users_group_name == group_name)
1247 1247 if cache:
1248 1248 q = q.options(
1249 1249 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1250 1250 return q.scalar()
1251 1251
1252 1252 @classmethod
1253 1253 def get(cls, user_group_id, cache=False):
1254 1254 user_group = cls.query()
1255 1255 if cache:
1256 1256 user_group = user_group.options(
1257 1257 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1258 1258 return user_group.get(user_group_id)
1259 1259
1260 1260 def permissions(self, with_admins=True, with_owner=True):
1261 1261 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1262 1262 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1263 1263 joinedload(UserUserGroupToPerm.user),
1264 1264 joinedload(UserUserGroupToPerm.permission),)
1265 1265
1266 1266         # get owners, admins and their permissions. We re-write the
1267 1267         # sqlalchemy objects into plain AttributeDicts (named-tuple-like)
1268 1268         # because the sqlalchemy session keeps a global reference, and changing
1269 1269         # one object would propagate to all others. This means that if an admin
1270 1270         # is also an owner, setting admin_row would otherwise affect both entries.
1271 1271 perm_rows = []
1272 1272 for _usr in q.all():
1273 1273 usr = AttributeDict(_usr.user.get_dict())
1274 1274 usr.permission = _usr.permission.permission_name
1275 1275 perm_rows.append(usr)
1276 1276
1277 1277 # filter the perm rows by 'default' first and then sort them by
1278 1278 # admin,write,read,none permissions sorted again alphabetically in
1279 1279 # each group
1280 1280 perm_rows = sorted(perm_rows, key=display_sort)
1281 1281
1282 1282 _admin_perm = 'usergroup.admin'
1283 1283 owner_row = []
1284 1284 if with_owner:
1285 1285 usr = AttributeDict(self.user.get_dict())
1286 1286 usr.owner_row = True
1287 1287 usr.permission = _admin_perm
1288 1288 owner_row.append(usr)
1289 1289
1290 1290 super_admin_rows = []
1291 1291 if with_admins:
1292 1292 for usr in User.get_all_super_admins():
1293 1293 # if this admin is also owner, don't double the record
1294 1294 if usr.user_id == owner_row[0].user_id:
1295 1295 owner_row[0].admin_row = True
1296 1296 else:
1297 1297 usr = AttributeDict(usr.get_dict())
1298 1298 usr.admin_row = True
1299 1299 usr.permission = _admin_perm
1300 1300 super_admin_rows.append(usr)
1301 1301
1302 1302 return super_admin_rows + owner_row + perm_rows
1303 1303
1304 1304 def permission_user_groups(self):
1305 1305 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1306 1306 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1307 1307 joinedload(UserGroupUserGroupToPerm.target_user_group),
1308 1308 joinedload(UserGroupUserGroupToPerm.permission),)
1309 1309
1310 1310 perm_rows = []
1311 1311 for _user_group in q.all():
1312 1312 usr = AttributeDict(_user_group.user_group.get_dict())
1313 1313 usr.permission = _user_group.permission.permission_name
1314 1314 perm_rows.append(usr)
1315 1315
1316 1316 return perm_rows
1317 1317
1318 1318 def _get_default_perms(self, user_group, suffix=''):
1319 1319 from rhodecode.model.permission import PermissionModel
1320 1320 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1321 1321
1322 1322 def get_default_perms(self, suffix=''):
1323 1323 return self._get_default_perms(self, suffix)
1324 1324
1325 1325 def get_api_data(self, with_group_members=True, include_secrets=False):
1326 1326 """
1327 1327 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1328 1328 basically forwarded.
1329 1329
1330 1330 """
1331 1331 user_group = self
1332 1332 data = {
1333 1333 'users_group_id': user_group.users_group_id,
1334 1334 'group_name': user_group.users_group_name,
1335 1335 'group_description': user_group.user_group_description,
1336 1336 'active': user_group.users_group_active,
1337 1337 'owner': user_group.user.username,
1338 1338 'owner_email': user_group.user.email,
1339 1339 }
1340 1340
1341 1341 if with_group_members:
1342 1342 users = []
1343 1343 for user in user_group.members:
1344 1344 user = user.user
1345 1345 users.append(user.get_api_data(include_secrets=include_secrets))
1346 1346 data['users'] = users
1347 1347
1348 1348 return data
1349 1349
1350 1350
1351 1351 class UserGroupMember(Base, BaseModel):
1352 1352 __tablename__ = 'users_groups_members'
1353 1353 __table_args__ = (
1354 1354 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1355 1355 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1356 1356 )
1357 1357
1358 1358 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1359 1359 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1360 1360 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1361 1361
1362 1362 user = relationship('User', lazy='joined')
1363 1363 users_group = relationship('UserGroup')
1364 1364
1365 1365 def __init__(self, gr_id='', u_id=''):
1366 1366 self.users_group_id = gr_id
1367 1367 self.user_id = u_id
1368 1368
1369 1369
1370 1370 class RepositoryField(Base, BaseModel):
1371 1371 __tablename__ = 'repositories_fields'
1372 1372 __table_args__ = (
1373 1373 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1374 1374 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1375 1375 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1376 1376 )
1377 1377 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1378 1378
1379 1379 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1380 1380 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1381 1381 field_key = Column("field_key", String(250))
1382 1382 field_label = Column("field_label", String(1024), nullable=False)
1383 1383 field_value = Column("field_value", String(10000), nullable=False)
1384 1384 field_desc = Column("field_desc", String(1024), nullable=False)
1385 1385 field_type = Column("field_type", String(255), nullable=False, unique=None)
1386 1386 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1387 1387
1388 1388 repository = relationship('Repository')
1389 1389
1390 1390 @property
1391 1391 def field_key_prefixed(self):
1392 1392 return 'ex_%s' % self.field_key
1393 1393
1394 1394 @classmethod
1395 1395 def un_prefix_key(cls, key):
1396 1396 if key.startswith(cls.PREFIX):
1397 1397 return key[len(cls.PREFIX):]
1398 1398 return key
1399 1399
1400 1400 @classmethod
1401 1401 def get_by_key_name(cls, key, repo):
1402 1402 row = cls.query()\
1403 1403 .filter(cls.repository == repo)\
1404 1404 .filter(cls.field_key == key).scalar()
1405 1405 return row
1406 1406
1407 1407
1408 1408 class Repository(Base, BaseModel):
1409 1409 __tablename__ = 'repositories'
1410 1410 __table_args__ = (
1411 1411 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1412 1412 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1413 1413 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1414 1414 )
1415 1415 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1416 1416 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1417 1417
1418 1418 STATE_CREATED = 'repo_state_created'
1419 1419 STATE_PENDING = 'repo_state_pending'
1420 1420 STATE_ERROR = 'repo_state_error'
1421 1421
1422 1422 LOCK_AUTOMATIC = 'lock_auto'
1423 1423 LOCK_API = 'lock_api'
1424 1424 LOCK_WEB = 'lock_web'
1425 1425 LOCK_PULL = 'lock_pull'
1426 1426
1427 1427 NAME_SEP = URL_SEP
1428 1428
1429 1429 repo_id = Column(
1430 1430 "repo_id", Integer(), nullable=False, unique=True, default=None,
1431 1431 primary_key=True)
1432 1432 _repo_name = Column(
1433 1433 "repo_name", Text(), nullable=False, default=None)
1434 1434 _repo_name_hash = Column(
1435 1435 "repo_name_hash", String(255), nullable=False, unique=True)
1436 1436 repo_state = Column("repo_state", String(255), nullable=True)
1437 1437
1438 1438 clone_uri = Column(
1439 1439 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1440 1440 default=None)
1441 1441 repo_type = Column(
1442 1442 "repo_type", String(255), nullable=False, unique=False, default=None)
1443 1443 user_id = Column(
1444 1444 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1445 1445 unique=False, default=None)
1446 1446 private = Column(
1447 1447 "private", Boolean(), nullable=True, unique=None, default=None)
1448 1448 enable_statistics = Column(
1449 1449 "statistics", Boolean(), nullable=True, unique=None, default=True)
1450 1450 enable_downloads = Column(
1451 1451 "downloads", Boolean(), nullable=True, unique=None, default=True)
1452 1452 description = Column(
1453 1453 "description", String(10000), nullable=True, unique=None, default=None)
1454 1454 created_on = Column(
1455 1455 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1456 1456 default=datetime.datetime.now)
1457 1457 updated_on = Column(
1458 1458 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1459 1459 default=datetime.datetime.now)
1460 1460 _landing_revision = Column(
1461 1461 "landing_revision", String(255), nullable=False, unique=False,
1462 1462 default=None)
1463 1463 enable_locking = Column(
1464 1464 "enable_locking", Boolean(), nullable=False, unique=None,
1465 1465 default=False)
1466 1466 _locked = Column(
1467 1467 "locked", String(255), nullable=True, unique=False, default=None)
1468 1468 _changeset_cache = Column(
1469 1469 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1470 1470
1471 1471 fork_id = Column(
1472 1472 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1473 1473 nullable=True, unique=False, default=None)
1474 1474 group_id = Column(
1475 1475 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1476 1476 unique=False, default=None)
1477 1477
1478 1478 user = relationship('User', lazy='joined')
1479 1479 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1480 1480 group = relationship('RepoGroup', lazy='joined')
1481 1481 repo_to_perm = relationship(
1482 1482 'UserRepoToPerm', cascade='all',
1483 1483 order_by='UserRepoToPerm.repo_to_perm_id')
1484 1484 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1485 1485 stats = relationship('Statistics', cascade='all', uselist=False)
1486 1486
1487 1487 followers = relationship(
1488 1488 'UserFollowing',
1489 1489 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1490 1490 cascade='all')
1491 1491 extra_fields = relationship(
1492 1492 'RepositoryField', cascade="all, delete, delete-orphan")
1493 1493 logs = relationship('UserLog')
1494 1494 comments = relationship(
1495 1495 'ChangesetComment', cascade="all, delete, delete-orphan")
1496 1496 pull_requests_source = relationship(
1497 1497 'PullRequest',
1498 1498 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1499 1499 cascade="all, delete, delete-orphan")
1500 1500 pull_requests_target = relationship(
1501 1501 'PullRequest',
1502 1502 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1503 1503 cascade="all, delete, delete-orphan")
1504 1504 ui = relationship('RepoRhodeCodeUi', cascade="all")
1505 1505 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1506 1506 integrations = relationship('Integration',
1507 1507 cascade="all, delete, delete-orphan")
1508 1508
1509 1509 def __unicode__(self):
1510 1510 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1511 1511 safe_unicode(self.repo_name))
1512 1512
1513 1513 @hybrid_property
1514 1514 def description_safe(self):
1515 1515 from rhodecode.lib import helpers as h
1516 1516 return h.escape(self.description)
1517 1517
1518 1518 @hybrid_property
1519 1519 def landing_rev(self):
1520 1520 # always should return [rev_type, rev]
1521 1521 if self._landing_revision:
1522 1522 _rev_info = self._landing_revision.split(':')
1523 1523 if len(_rev_info) < 2:
1524 1524 _rev_info.insert(0, 'rev')
1525 1525 return [_rev_info[0], _rev_info[1]]
1526 1526 return [None, None]
1527 1527
1528 1528 @landing_rev.setter
1529 1529 def landing_rev(self, val):
1530 1530 if ':' not in val:
1531 1531 raise ValueError('value must be delimited with `:` and consist '
1532 1532 'of <rev_type>:<rev>, got %s instead' % val)
1533 1533 self._landing_revision = val
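
    # Illustrative sketch (hypothetical value): the landing revision is stored as
    # '<rev_type>:<rev>', e.g. setting repo.landing_rev = 'branch:default' reads
    # back as ['branch', 'default']; values without a ':' are rejected by the
    # setter above.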
1534 1534
1535 1535 @hybrid_property
1536 1536 def locked(self):
1537 1537 if self._locked:
1538 1538 user_id, timelocked, reason = self._locked.split(':')
1539 1539 lock_values = int(user_id), timelocked, reason
1540 1540 else:
1541 1541 lock_values = [None, None, None]
1542 1542 return lock_values
1543 1543
1544 1544 @locked.setter
1545 1545 def locked(self, val):
1546 1546 if val and isinstance(val, (list, tuple)):
1547 1547 self._locked = ':'.join(map(str, val))
1548 1548 else:
1549 1549 self._locked = None
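
    # Illustrative sketch (hypothetical values): a lock is stored as
    # 'user_id:timestamp:reason', e.g. repo.locked = [2, 1500000000, Repository.LOCK_API]
    # reads back as (2, '1500000000', 'lock_api'); an unset lock reads back as
    # [None, None, None].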
1550 1550
1551 1551 @hybrid_property
1552 1552 def changeset_cache(self):
1553 1553 from rhodecode.lib.vcs.backends.base import EmptyCommit
1554 1554 dummy = EmptyCommit().__json__()
1555 1555 if not self._changeset_cache:
1556 1556 return dummy
1557 1557 try:
1558 1558 return json.loads(self._changeset_cache)
1559 1559 except TypeError:
1560 1560 return dummy
1561 1561 except Exception:
1562 1562 log.error(traceback.format_exc())
1563 1563 return dummy
1564 1564
1565 1565 @changeset_cache.setter
1566 1566 def changeset_cache(self, val):
1567 1567 try:
1568 1568 self._changeset_cache = json.dumps(val)
1569 1569 except Exception:
1570 1570 log.error(traceback.format_exc())
1571 1571
1572 1572 @hybrid_property
1573 1573 def repo_name(self):
1574 1574 return self._repo_name
1575 1575
1576 1576 @repo_name.setter
1577 1577 def repo_name(self, value):
1578 1578 self._repo_name = value
1579 1579 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1580 1580
1581 1581 @classmethod
1582 1582 def normalize_repo_name(cls, repo_name):
1583 1583 """
1584 1584         Normalizes an OS-specific repo_name to the format stored internally
1585 1585         in the database, using URL_SEP
1586 1586
1587 1587 :param cls:
1588 1588 :param repo_name:
1589 1589 """
1590 1590 return cls.NAME_SEP.join(repo_name.split(os.sep))
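
    # Illustrative sketch: on Windows, where os.sep is '\\', a hypothetical
    # Repository.normalize_repo_name('group\\repo') returns 'group/repo';
    # on POSIX systems the name is effectively left unchanged.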
1591 1591
1592 1592 @classmethod
1593 1593 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1594 1594 session = Session()
1595 1595 q = session.query(cls).filter(cls.repo_name == repo_name)
1596 1596
1597 1597 if cache:
1598 1598 if identity_cache:
1599 1599 val = cls.identity_cache(session, 'repo_name', repo_name)
1600 1600 if val:
1601 1601 return val
1602 1602 else:
1603 1603 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1604 1604 q = q.options(
1605 1605 FromCache("sql_cache_short", cache_key))
1606 1606
1607 1607 return q.scalar()
1608 1608
1609 1609 @classmethod
1610 1610 def get_by_full_path(cls, repo_full_path):
1611 1611 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1612 1612 repo_name = cls.normalize_repo_name(repo_name)
1613 1613 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1614 1614
1615 1615 @classmethod
1616 1616 def get_repo_forks(cls, repo_id):
1617 1617 return cls.query().filter(Repository.fork_id == repo_id)
1618 1618
1619 1619 @classmethod
1620 1620 def base_path(cls):
1621 1621 """
1622 1622         Returns the base path where all repos are stored
1623 1623
1624 1624 :param cls:
1625 1625 """
1626 1626 q = Session().query(RhodeCodeUi)\
1627 1627 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1628 1628 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1629 1629 return q.one().ui_value
1630 1630
1631 1631 @classmethod
1632 1632 def is_valid(cls, repo_name):
1633 1633 """
1634 1634 returns True if given repo name is a valid filesystem repository
1635 1635
1636 1636 :param cls:
1637 1637 :param repo_name:
1638 1638 """
1639 1639 from rhodecode.lib.utils import is_valid_repo
1640 1640
1641 1641 return is_valid_repo(repo_name, cls.base_path())
1642 1642
1643 1643 @classmethod
1644 1644 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1645 1645 case_insensitive=True):
1646 1646 q = Repository.query()
1647 1647
1648 1648 if not isinstance(user_id, Optional):
1649 1649 q = q.filter(Repository.user_id == user_id)
1650 1650
1651 1651 if not isinstance(group_id, Optional):
1652 1652 q = q.filter(Repository.group_id == group_id)
1653 1653
1654 1654 if case_insensitive:
1655 1655 q = q.order_by(func.lower(Repository.repo_name))
1656 1656 else:
1657 1657 q = q.order_by(Repository.repo_name)
1658 1658 return q.all()
1659 1659
1660 1660 @property
1661 1661 def forks(self):
1662 1662 """
1663 1663 Return forks of this repo
1664 1664 """
1665 1665 return Repository.get_repo_forks(self.repo_id)
1666 1666
1667 1667 @property
1668 1668 def parent(self):
1669 1669 """
1670 1670 Returns fork parent
1671 1671 """
1672 1672 return self.fork
1673 1673
1674 1674 @property
1675 1675 def just_name(self):
1676 1676 return self.repo_name.split(self.NAME_SEP)[-1]
1677 1677
1678 1678 @property
1679 1679 def groups_with_parents(self):
1680 1680 groups = []
1681 1681 if self.group is None:
1682 1682 return groups
1683 1683
1684 1684 cur_gr = self.group
1685 1685 groups.insert(0, cur_gr)
1686 1686 while 1:
1687 1687 gr = getattr(cur_gr, 'parent_group', None)
1688 1688 cur_gr = cur_gr.parent_group
1689 1689 if gr is None:
1690 1690 break
1691 1691 groups.insert(0, gr)
1692 1692
1693 1693 return groups
1694 1694
1695 1695 @property
1696 1696 def groups_and_repo(self):
1697 1697 return self.groups_with_parents, self
1698 1698
1699 1699 @LazyProperty
1700 1700 def repo_path(self):
1701 1701 """
1702 1702         Returns the full base path for this repository, i.e. where it
1703 1703         actually exists on the filesystem
1704 1704 """
1705 1705 q = Session().query(RhodeCodeUi).filter(
1706 1706 RhodeCodeUi.ui_key == self.NAME_SEP)
1707 1707 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1708 1708 return q.one().ui_value
1709 1709
1710 1710 @property
1711 1711 def repo_full_path(self):
1712 1712 p = [self.repo_path]
1713 1713 # we need to split the name by / since this is how we store the
1714 1714 # names in the database, but that eventually needs to be converted
1715 1715 # into a valid system path
1716 1716 p += self.repo_name.split(self.NAME_SEP)
1717 1717 return os.path.join(*map(safe_unicode, p))
1718 1718
1719 1719 @property
1720 1720 def cache_keys(self):
1721 1721 """
1722 1722 Returns associated cache keys for that repo
1723 1723 """
1724 1724 return CacheKey.query()\
1725 1725 .filter(CacheKey.cache_args == self.repo_name)\
1726 1726 .order_by(CacheKey.cache_key)\
1727 1727 .all()
1728 1728
1729 1729 def get_new_name(self, repo_name):
1730 1730 """
1731 1731         returns the new full repository name based on the assigned group and the new name
1732 1732 
1733 1733         :param repo_name:
1734 1734 """
1735 1735 path_prefix = self.group.full_path_splitted if self.group else []
1736 1736 return self.NAME_SEP.join(path_prefix + [repo_name])
1737 1737
1738 1738 @property
1739 1739 def _config(self):
1740 1740 """
1741 1741 Returns db based config object.
1742 1742 """
1743 1743 from rhodecode.lib.utils import make_db_config
1744 1744 return make_db_config(clear_session=False, repo=self)
1745 1745
1746 1746 def permissions(self, with_admins=True, with_owner=True):
1747 1747 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1748 1748 q = q.options(joinedload(UserRepoToPerm.repository),
1749 1749 joinedload(UserRepoToPerm.user),
1750 1750 joinedload(UserRepoToPerm.permission),)
1751 1751
1752 1752         # get owners, admins and their permissions. We re-write the
1753 1753         # sqlalchemy objects into plain AttributeDicts (named-tuple-like)
1754 1754         # because the sqlalchemy session keeps a global reference, and changing
1755 1755         # one object would propagate to all others. This means that if an admin
1756 1756         # is also an owner, setting admin_row would otherwise affect both entries.
1757 1757 perm_rows = []
1758 1758 for _usr in q.all():
1759 1759 usr = AttributeDict(_usr.user.get_dict())
1760 1760 usr.permission = _usr.permission.permission_name
1761 1761 perm_rows.append(usr)
1762 1762
1763 1763 # filter the perm rows by 'default' first and then sort them by
1764 1764 # admin,write,read,none permissions sorted again alphabetically in
1765 1765 # each group
1766 1766 perm_rows = sorted(perm_rows, key=display_sort)
1767 1767
1768 1768 _admin_perm = 'repository.admin'
1769 1769 owner_row = []
1770 1770 if with_owner:
1771 1771 usr = AttributeDict(self.user.get_dict())
1772 1772 usr.owner_row = True
1773 1773 usr.permission = _admin_perm
1774 1774 owner_row.append(usr)
1775 1775
1776 1776 super_admin_rows = []
1777 1777 if with_admins:
1778 1778 for usr in User.get_all_super_admins():
1779 1779 # if this admin is also owner, don't double the record
1780 1780 if usr.user_id == owner_row[0].user_id:
1781 1781 owner_row[0].admin_row = True
1782 1782 else:
1783 1783 usr = AttributeDict(usr.get_dict())
1784 1784 usr.admin_row = True
1785 1785 usr.permission = _admin_perm
1786 1786 super_admin_rows.append(usr)
1787 1787
1788 1788 return super_admin_rows + owner_row + perm_rows
1789 1789
1790 1790 def permission_user_groups(self):
1791 1791 q = UserGroupRepoToPerm.query().filter(
1792 1792 UserGroupRepoToPerm.repository == self)
1793 1793 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1794 1794 joinedload(UserGroupRepoToPerm.users_group),
1795 1795 joinedload(UserGroupRepoToPerm.permission),)
1796 1796
1797 1797 perm_rows = []
1798 1798 for _user_group in q.all():
1799 1799 usr = AttributeDict(_user_group.users_group.get_dict())
1800 1800 usr.permission = _user_group.permission.permission_name
1801 1801 perm_rows.append(usr)
1802 1802
1803 1803 return perm_rows
1804 1804
1805 1805 def get_api_data(self, include_secrets=False):
1806 1806 """
1807 1807 Common function for generating repo api data
1808 1808
1809 1809 :param include_secrets: See :meth:`User.get_api_data`.
1810 1810
1811 1811 """
1812 1812         # TODO: mikhail: there is an anti-pattern here; we probably need to
1813 1813         # move these methods to the model level.
1814 1814 from rhodecode.model.settings import SettingsModel
1815 1815 from rhodecode.model.repo import RepoModel
1816 1816
1817 1817 repo = self
1818 1818 _user_id, _time, _reason = self.locked
1819 1819
1820 1820 data = {
1821 1821 'repo_id': repo.repo_id,
1822 1822 'repo_name': repo.repo_name,
1823 1823 'repo_type': repo.repo_type,
1824 1824 'clone_uri': repo.clone_uri or '',
1825 1825 'url': RepoModel().get_url(self),
1826 1826 'private': repo.private,
1827 1827 'created_on': repo.created_on,
1828 1828 'description': repo.description_safe,
1829 1829 'landing_rev': repo.landing_rev,
1830 1830 'owner': repo.user.username,
1831 1831 'fork_of': repo.fork.repo_name if repo.fork else None,
1832 1832 'fork_of_id': repo.fork.repo_id if repo.fork else None,
1833 1833 'enable_statistics': repo.enable_statistics,
1834 1834 'enable_locking': repo.enable_locking,
1835 1835 'enable_downloads': repo.enable_downloads,
1836 1836 'last_changeset': repo.changeset_cache,
1837 1837 'locked_by': User.get(_user_id).get_api_data(
1838 1838 include_secrets=include_secrets) if _user_id else None,
1839 1839 'locked_date': time_to_datetime(_time) if _time else None,
1840 1840 'lock_reason': _reason if _reason else None,
1841 1841 }
1842 1842
1843 1843 # TODO: mikhail: should be per-repo settings here
1844 1844 rc_config = SettingsModel().get_all_settings()
1845 1845 repository_fields = str2bool(
1846 1846 rc_config.get('rhodecode_repository_fields'))
1847 1847 if repository_fields:
1848 1848 for f in self.extra_fields:
1849 1849 data[f.field_key_prefixed] = f.field_value
1850 1850
1851 1851 return data
1852 1852
1853 1853 @classmethod
1854 1854 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1855 1855 if not lock_time:
1856 1856 lock_time = time.time()
1857 1857 if not lock_reason:
1858 1858 lock_reason = cls.LOCK_AUTOMATIC
1859 1859 repo.locked = [user_id, lock_time, lock_reason]
1860 1860 Session().add(repo)
1861 1861 Session().commit()
1862 1862
1863 1863 @classmethod
1864 1864 def unlock(cls, repo):
1865 1865 repo.locked = None
1866 1866 Session().add(repo)
1867 1867 Session().commit()
1868 1868
1869 1869 @classmethod
1870 1870 def getlock(cls, repo):
1871 1871 return repo.locked
1872 1872
1873 1873 def is_user_lock(self, user_id):
1874 1874         if self.locked[0]:
1875 1875             lock_user_id = safe_int(self.locked[0])
1876 1876 user_id = safe_int(user_id)
1877 1877 # both are ints, and they are equal
1878 1878 return all([lock_user_id, user_id]) and lock_user_id == user_id
1879 1879
1880 1880 return False
1881 1881
1882 1882 def get_locking_state(self, action, user_id, only_when_enabled=True):
1883 1883 """
1884 1884         Checks locking on this repository. If locking is enabled and a lock is
1885 1885         present, returns a tuple of (make_lock, locked, locked_by).
1886 1886         make_lock can have 3 states: None (do nothing), True (make a lock) and
1887 1887         False (release a lock). This value is later propagated to the hooks,
1888 1888         which do the locking; think of it as a signal telling the hooks what to do.
1889 1889
1890 1890 """
1891 1891 # TODO: johbo: This is part of the business logic and should be moved
1892 1892 # into the RepositoryModel.
1893 1893
1894 1894 if action not in ('push', 'pull'):
1895 1895 raise ValueError("Invalid action value: %s" % repr(action))
1896 1896
1897 1897 # defines if locked error should be thrown to user
1898 1898 currently_locked = False
1899 1899 # defines if new lock should be made, tri-state
1900 1900 make_lock = None
1901 1901 repo = self
1902 1902 user = User.get(user_id)
1903 1903
1904 1904 lock_info = repo.locked
1905 1905
1906 1906 if repo and (repo.enable_locking or not only_when_enabled):
1907 1907 if action == 'push':
1908 1908                 # check if it's already locked; if it is, compare users
1909 1909 locked_by_user_id = lock_info[0]
1910 1910 if user.user_id == locked_by_user_id:
1911 1911 log.debug(
1912 1912 'Got `push` action from user %s, now unlocking', user)
1913 1913 # unlock if we have push from user who locked
1914 1914 make_lock = False
1915 1915 else:
1916 1916                     # we're not the same user who locked it, ban with the status
1917 1917                     # code defined in settings (default is 423 HTTP Locked)
1918 1918 log.debug('Repo %s is currently locked by %s', repo, user)
1919 1919 currently_locked = True
1920 1920 elif action == 'pull':
1921 1921 # [0] user [1] date
1922 1922 if lock_info[0] and lock_info[1]:
1923 1923 log.debug('Repo %s is currently locked by %s', repo, user)
1924 1924 currently_locked = True
1925 1925 else:
1926 1926 log.debug('Setting lock on repo %s by %s', repo, user)
1927 1927 make_lock = True
1928 1928
1929 1929 else:
 1930 1930             log.debug('Repository %s does not have locking enabled', repo)
1931 1931
1932 1932 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
1933 1933 make_lock, currently_locked, lock_info)
1934 1934
1935 1935 from rhodecode.lib.auth import HasRepoPermissionAny
1936 1936 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
1937 1937 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
1938 1938 # if we don't have at least write permission we cannot make a lock
1939 1939 log.debug('lock state reset back to FALSE due to lack '
 1940 1940                       'of at least write permission')
1941 1941 make_lock = False
1942 1942
1943 1943 return make_lock, currently_locked, lock_info
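    # Editor's note: a hedged sketch of how the tri-state result above is meant
    # to be consumed (not part of the original file; `repo` and `user` are
    # assumed to exist):
    #
    #   make_lock, locked, locked_by = repo.get_locking_state('push', user.user_id)
    #   if make_lock is True:        # hooks should acquire the lock
    #       Repository.lock(repo, user.user_id)
    #   elif make_lock is False:     # hooks should release the lock
    #       Repository.unlock(repo)
    #   # make_lock is None means "do nothing"; `locked` signals the 423 response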
1944 1944
1945 1945 @property
1946 1946 def last_db_change(self):
1947 1947 return self.updated_on
1948 1948
1949 1949 @property
1950 1950 def clone_uri_hidden(self):
1951 1951 clone_uri = self.clone_uri
1952 1952 if clone_uri:
1953 1953 import urlobject
1954 1954 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
1955 1955 if url_obj.password:
1956 1956 clone_uri = url_obj.with_password('*****')
1957 1957 return clone_uri
1958 1958
1959 1959 def clone_url(self, **override):
1960 1960 from rhodecode.model.settings import SettingsModel
1961 1961
1962 1962 uri_tmpl = None
1963 1963 if 'with_id' in override:
1964 1964 uri_tmpl = self.DEFAULT_CLONE_URI_ID
1965 1965 del override['with_id']
1966 1966
1967 1967 if 'uri_tmpl' in override:
1968 1968 uri_tmpl = override['uri_tmpl']
1969 1969 del override['uri_tmpl']
1970 1970
1971 1971 # we didn't override our tmpl from **overrides
1972 1972 if not uri_tmpl:
1973 1973 rc_config = SettingsModel().get_all_settings(cache=True)
1974 1974 uri_tmpl = rc_config.get(
1975 1975 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
1976 1976
1977 1977 request = get_current_request()
1978 1978 return get_clone_url(request=request,
1979 1979 uri_tmpl=uri_tmpl,
1980 1980 repo_name=self.repo_name,
1981 1981 repo_id=self.repo_id, **override)
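    # Editor's note: illustrative calls for the override mechanics above; not
    # part of the original file, and the explicit template string is only an
    # assumption about what a custom `uri_tmpl` might look like:
    #
    #   repo.clone_url()                  # configured template, or DEFAULT_CLONE_URI
    #   repo.clone_url(with_id=True)      # uses DEFAULT_CLONE_URI_ID
    #   repo.clone_url(uri_tmpl='{scheme}://{netloc}/{repo}')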
1982 1982
1983 1983 def set_state(self, state):
1984 1984 self.repo_state = state
1985 1985 Session().add(self)
 1986 1986     # ==========================================================================
 1987 1987     # SCM PROPERTIES
 1988 1988     # ==========================================================================
1989 1989
1990 1990 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
1991 1991 return get_commit_safe(
1992 1992 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
1993 1993
1994 1994 def get_changeset(self, rev=None, pre_load=None):
1995 1995 warnings.warn("Use get_commit", DeprecationWarning)
1996 1996 commit_id = None
1997 1997 commit_idx = None
1998 1998 if isinstance(rev, basestring):
1999 1999 commit_id = rev
2000 2000 else:
2001 2001 commit_idx = rev
2002 2002 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2003 2003 pre_load=pre_load)
2004 2004
2005 2005 def get_landing_commit(self):
2006 2006 """
 2007 2007         Returns the landing commit or, if that doesn't exist, the tip
2008 2008 """
2009 2009 _rev_type, _rev = self.landing_rev
2010 2010 commit = self.get_commit(_rev)
2011 2011 if isinstance(commit, EmptyCommit):
2012 2012 return self.get_commit()
2013 2013 return commit
2014 2014
2015 2015 def update_commit_cache(self, cs_cache=None, config=None):
2016 2016 """
2017 2017 Update cache of last changeset for repository, keys should be::
2018 2018
2019 2019 short_id
2020 2020 raw_id
2021 2021 revision
2022 2022 parents
2023 2023 message
2024 2024 date
2025 2025 author
2026 2026
2027 2027 :param cs_cache:
2028 2028 """
2029 2029 from rhodecode.lib.vcs.backends.base import BaseChangeset
2030 2030 if cs_cache is None:
2031 2031 # use no-cache version here
2032 2032 scm_repo = self.scm_instance(cache=False, config=config)
2033 2033 if scm_repo:
2034 2034 cs_cache = scm_repo.get_commit(
2035 2035 pre_load=["author", "date", "message", "parents"])
2036 2036 else:
2037 2037 cs_cache = EmptyCommit()
2038 2038
2039 2039 if isinstance(cs_cache, BaseChangeset):
2040 2040 cs_cache = cs_cache.__json__()
2041 2041
2042 2042 def is_outdated(new_cs_cache):
2043 2043 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2044 2044 new_cs_cache['revision'] != self.changeset_cache['revision']):
2045 2045 return True
2046 2046 return False
2047 2047
 2048 2048         # check if we maybe already have the latest cached revision
2049 2049 if is_outdated(cs_cache) or not self.changeset_cache:
2050 2050 _default = datetime.datetime.fromtimestamp(0)
2051 2051 last_change = cs_cache.get('date') or _default
2052 2052 log.debug('updated repo %s with new cs cache %s',
2053 2053 self.repo_name, cs_cache)
2054 2054 self.updated_on = last_change
2055 2055 self.changeset_cache = cs_cache
2056 2056 Session().add(self)
2057 2057 Session().commit()
2058 2058 else:
 2059 2059             log.debug('Skipping update_commit_cache for repo:`%s`, '
 2060 2060                       'cache already holds the latest changes', self.repo_name)
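    # Editor's note: when cs_cache is passed explicitly it is expected to be a
    # dict with the keys listed in the docstring above; a hedged example with
    # made-up values:
    #
    #   repo.update_commit_cache(cs_cache={
    #       'short_id': 'abcdef123456', 'raw_id': '<40-char sha>', 'revision': 42,
    #       'parents': [], 'message': 'fix typo', 'author': 'dev <dev@example.com>',
    #       'date': datetime.datetime.now()})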
2061 2061
2062 2062 @property
2063 2063 def tip(self):
2064 2064 return self.get_commit('tip')
2065 2065
2066 2066 @property
2067 2067 def author(self):
2068 2068 return self.tip.author
2069 2069
2070 2070 @property
2071 2071 def last_change(self):
2072 2072 return self.scm_instance().last_change
2073 2073
2074 2074 def get_comments(self, revisions=None):
2075 2075 """
2076 2076 Returns comments for this repository grouped by revisions
2077 2077
2078 2078 :param revisions: filter query by revisions only
2079 2079 """
2080 2080 cmts = ChangesetComment.query()\
2081 2081 .filter(ChangesetComment.repo == self)
2082 2082 if revisions:
2083 2083 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2084 2084 grouped = collections.defaultdict(list)
2085 2085 for cmt in cmts.all():
2086 2086 grouped[cmt.revision].append(cmt)
2087 2087 return grouped
2088 2088
2089 2089 def statuses(self, revisions=None):
2090 2090 """
2091 2091 Returns statuses for this repository
2092 2092
2093 2093 :param revisions: list of revisions to get statuses for
2094 2094 """
2095 2095 statuses = ChangesetStatus.query()\
2096 2096 .filter(ChangesetStatus.repo == self)\
2097 2097 .filter(ChangesetStatus.version == 0)
2098 2098
2099 2099 if revisions:
2100 2100 # Try doing the filtering in chunks to avoid hitting limits
2101 2101 size = 500
2102 2102 status_results = []
2103 2103 for chunk in xrange(0, len(revisions), size):
2104 2104 status_results += statuses.filter(
2105 2105 ChangesetStatus.revision.in_(
2106 2106 revisions[chunk: chunk+size])
2107 2107 ).all()
2108 2108 else:
2109 2109 status_results = statuses.all()
2110 2110
2111 2111 grouped = {}
2112 2112
 2113 2113         # maybe we have an open new pull request without a status?
2114 2114 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2115 2115 status_lbl = ChangesetStatus.get_status_lbl(stat)
2116 2116 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2117 2117 for rev in pr.revisions:
2118 2118 pr_id = pr.pull_request_id
2119 2119 pr_repo = pr.target_repo.repo_name
2120 2120 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2121 2121
2122 2122 for stat in status_results:
2123 2123 pr_id = pr_repo = None
2124 2124 if stat.pull_request:
2125 2125 pr_id = stat.pull_request.pull_request_id
2126 2126 pr_repo = stat.pull_request.target_repo.repo_name
2127 2127 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2128 2128 pr_id, pr_repo]
2129 2129 return grouped
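    # Editor's note: the chunked filtering above simply slices the revision list
    # into IN() clauses of at most 500 items, e.g. for 1200 revisions the chunks
    # are revisions[0:500], revisions[500:1000] and revisions[1000:1500] (the
    # last slice safely stops at the end of the list). The returned dict maps
    # revision -> [status, status label, pull request id, target repo name].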
2130 2130
2131 2131 # ==========================================================================
2132 2132 # SCM CACHE INSTANCE
2133 2133 # ==========================================================================
2134 2134
2135 2135 def scm_instance(self, **kwargs):
2136 2136 import rhodecode
2137 2137
 2138 2138         # Passing a config will not hit the cache; currently this is only
 2139 2139         # used for repo2dbmapper
2140 2140 config = kwargs.pop('config', None)
2141 2141 cache = kwargs.pop('cache', None)
2142 2142 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
 2143 2143         # if cache is NOT defined use the global default, else we have full
 2144 2144         # control over the cache behaviour
2145 2145 if cache is None and full_cache and not config:
2146 2146 return self._get_instance_cached()
2147 2147 return self._get_instance(cache=bool(cache), config=config)
2148 2148
2149 2149 def _get_instance_cached(self):
2150 2150 @cache_region('long_term')
2151 2151 def _get_repo(cache_key):
2152 2152 return self._get_instance()
2153 2153
2154 2154 invalidator_context = CacheKey.repo_context_cache(
2155 2155 _get_repo, self.repo_name, None, thread_scoped=True)
2156 2156
2157 2157 with invalidator_context as context:
2158 2158 context.invalidate()
2159 2159 repo = context.compute()
2160 2160
2161 2161 return repo
2162 2162
2163 2163 def _get_instance(self, cache=True, config=None):
2164 2164 config = config or self._config
2165 2165 custom_wire = {
2166 2166 'cache': cache # controls the vcs.remote cache
2167 2167 }
2168 2168 repo = get_vcs_instance(
2169 2169 repo_path=safe_str(self.repo_full_path),
2170 2170 config=config,
2171 2171 with_wire=custom_wire,
2172 2172 create=False,
2173 2173 _vcs_alias=self.repo_type)
2174 2174
2175 2175 return repo
2176 2176
2177 2177 def __json__(self):
2178 2178 return {'landing_rev': self.landing_rev}
2179 2179
2180 2180 def get_dict(self):
2181 2181
2182 2182 # Since we transformed `repo_name` to a hybrid property, we need to
2183 2183 # keep compatibility with the code which uses `repo_name` field.
2184 2184
2185 2185 result = super(Repository, self).get_dict()
2186 2186 result['repo_name'] = result.pop('_repo_name', None)
2187 2187 return result
2188 2188
2189 2189
2190 2190 class RepoGroup(Base, BaseModel):
2191 2191 __tablename__ = 'groups'
2192 2192 __table_args__ = (
2193 2193 UniqueConstraint('group_name', 'group_parent_id'),
2194 2194 CheckConstraint('group_id != group_parent_id'),
2195 2195 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2196 2196 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2197 2197 )
2198 2198 __mapper_args__ = {'order_by': 'group_name'}
2199 2199
2200 2200 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2201 2201
2202 2202 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2203 2203 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2204 2204 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2205 2205 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2206 2206 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2207 2207 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2208 2208 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2209 2209 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2210 2210
2211 2211 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2212 2212 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2213 2213 parent_group = relationship('RepoGroup', remote_side=group_id)
2214 2214 user = relationship('User')
2215 2215 integrations = relationship('Integration',
2216 2216 cascade="all, delete, delete-orphan")
2217 2217
2218 2218 def __init__(self, group_name='', parent_group=None):
2219 2219 self.group_name = group_name
2220 2220 self.parent_group = parent_group
2221 2221
2222 2222 def __unicode__(self):
2223 2223 return u"<%s('id:%s:%s')>" % (
2224 2224 self.__class__.__name__, self.group_id, self.group_name)
2225 2225
2226 2226 @hybrid_property
2227 2227 def description_safe(self):
2228 2228 from rhodecode.lib import helpers as h
2229 2229 return h.escape(self.group_description)
2230 2230
2231 2231 @classmethod
2232 2232 def _generate_choice(cls, repo_group):
2233 2233 from webhelpers.html import literal as _literal
2234 2234 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2235 2235 return repo_group.group_id, _name(repo_group.full_path_splitted)
2236 2236
2237 2237 @classmethod
2238 2238 def groups_choices(cls, groups=None, show_empty_group=True):
2239 2239 if not groups:
2240 2240 groups = cls.query().all()
2241 2241
2242 2242 repo_groups = []
2243 2243 if show_empty_group:
2244 2244 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2245 2245
2246 2246 repo_groups.extend([cls._generate_choice(x) for x in groups])
2247 2247
2248 2248 repo_groups = sorted(
2249 2249 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2250 2250 return repo_groups
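    # Editor's note: a hedged example of the generated choices, assuming two
    # nested groups 'web' and 'web/tools' with ids 1 and 2:
    #
    #   RepoGroup.groups_choices()
    #   # -> [(-1, u'-- No parent --'), (1, u'web'), (2, u'web/tools')]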
2251 2251
2252 2252 @classmethod
2253 2253 def url_sep(cls):
2254 2254 return URL_SEP
2255 2255
2256 2256 @classmethod
2257 2257 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2258 2258 if case_insensitive:
2259 2259 gr = cls.query().filter(func.lower(cls.group_name)
2260 2260 == func.lower(group_name))
2261 2261 else:
2262 2262 gr = cls.query().filter(cls.group_name == group_name)
2263 2263 if cache:
2264 2264 name_key = _hash_key(group_name)
2265 2265 gr = gr.options(
2266 2266 FromCache("sql_cache_short", "get_group_%s" % name_key))
2267 2267 return gr.scalar()
2268 2268
2269 2269 @classmethod
2270 2270 def get_user_personal_repo_group(cls, user_id):
2271 2271 user = User.get(user_id)
2272 2272 if user.username == User.DEFAULT_USER:
2273 2273 return None
2274 2274
2275 2275 return cls.query()\
2276 2276 .filter(cls.personal == true()) \
2277 2277 .filter(cls.user == user).scalar()
2278 2278
2279 2279 @classmethod
2280 2280 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2281 2281 case_insensitive=True):
2282 2282 q = RepoGroup.query()
2283 2283
2284 2284 if not isinstance(user_id, Optional):
2285 2285 q = q.filter(RepoGroup.user_id == user_id)
2286 2286
2287 2287 if not isinstance(group_id, Optional):
2288 2288 q = q.filter(RepoGroup.group_parent_id == group_id)
2289 2289
2290 2290 if case_insensitive:
2291 2291 q = q.order_by(func.lower(RepoGroup.group_name))
2292 2292 else:
2293 2293 q = q.order_by(RepoGroup.group_name)
2294 2294 return q.all()
2295 2295
2296 2296 @property
2297 2297 def parents(self):
2298 2298 parents_recursion_limit = 10
2299 2299 groups = []
2300 2300 if self.parent_group is None:
2301 2301 return groups
2302 2302 cur_gr = self.parent_group
2303 2303 groups.insert(0, cur_gr)
2304 2304 cnt = 0
2305 2305 while 1:
2306 2306 cnt += 1
2307 2307 gr = getattr(cur_gr, 'parent_group', None)
2308 2308 cur_gr = cur_gr.parent_group
2309 2309 if gr is None:
2310 2310 break
2311 2311 if cnt == parents_recursion_limit:
 2312 2312                 # this will prevent accidental infinite loops
2313 2313 log.error(('more than %s parents found for group %s, stopping '
2314 2314 'recursive parent fetching' % (parents_recursion_limit, self)))
2315 2315 break
2316 2316
2317 2317 groups.insert(0, gr)
2318 2318 return groups
2319 2319
2320 2320 @property
2321 2321 def children(self):
2322 2322 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2323 2323
2324 2324 @property
2325 2325 def name(self):
2326 2326 return self.group_name.split(RepoGroup.url_sep())[-1]
2327 2327
2328 2328 @property
2329 2329 def full_path(self):
2330 2330 return self.group_name
2331 2331
2332 2332 @property
2333 2333 def full_path_splitted(self):
2334 2334 return self.group_name.split(RepoGroup.url_sep())
2335 2335
2336 2336 @property
2337 2337 def repositories(self):
2338 2338 return Repository.query()\
2339 2339 .filter(Repository.group == self)\
2340 2340 .order_by(Repository.repo_name)
2341 2341
2342 2342 @property
2343 2343 def repositories_recursive_count(self):
2344 2344 cnt = self.repositories.count()
2345 2345
2346 2346 def children_count(group):
2347 2347 cnt = 0
2348 2348 for child in group.children:
2349 2349 cnt += child.repositories.count()
2350 2350 cnt += children_count(child)
2351 2351 return cnt
2352 2352
2353 2353 return cnt + children_count(self)
2354 2354
2355 2355 def _recursive_objects(self, include_repos=True):
2356 2356 all_ = []
2357 2357
2358 2358 def _get_members(root_gr):
2359 2359 if include_repos:
2360 2360 for r in root_gr.repositories:
2361 2361 all_.append(r)
2362 2362 childs = root_gr.children.all()
2363 2363 if childs:
2364 2364 for gr in childs:
2365 2365 all_.append(gr)
2366 2366 _get_members(gr)
2367 2367
2368 2368 _get_members(self)
2369 2369 return [self] + all_
2370 2370
2371 2371 def recursive_groups_and_repos(self):
2372 2372 """
2373 2373 Recursive return all groups, with repositories in those groups
2374 2374 """
2375 2375 return self._recursive_objects()
2376 2376
2377 2377 def recursive_groups(self):
2378 2378 """
2379 2379 Returns all children groups for this group including children of children
2380 2380 """
2381 2381 return self._recursive_objects(include_repos=False)
2382 2382
2383 2383 def get_new_name(self, group_name):
2384 2384 """
 2385 2385         Returns the new full group name based on the parent and the new name
2386 2386
2387 2387 :param group_name:
2388 2388 """
2389 2389 path_prefix = (self.parent_group.full_path_splitted if
2390 2390 self.parent_group else [])
2391 2391 return RepoGroup.url_sep().join(path_prefix + [group_name])
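    # Editor's note: e.g. for a group whose parent is 'web', get_new_name('apps')
    # returns 'web/apps'; for a top-level group it returns just 'apps'.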
2392 2392
2393 2393 def permissions(self, with_admins=True, with_owner=True):
2394 2394 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2395 2395 q = q.options(joinedload(UserRepoGroupToPerm.group),
2396 2396 joinedload(UserRepoGroupToPerm.user),
2397 2397 joinedload(UserRepoGroupToPerm.permission),)
2398 2398
 2399 2399         # get owners, admins and permissions. We do a trick of re-writing
 2400 2400         # sqlalchemy objects into named-tuples because the sqlalchemy session
 2401 2401         # holds a global reference, so changing one object propagates to all
 2402 2402         # others. This means that if an admin is also an owner, a change to
 2403 2403         # the admin_row would propagate to both objects
2404 2404 perm_rows = []
2405 2405 for _usr in q.all():
2406 2406 usr = AttributeDict(_usr.user.get_dict())
2407 2407 usr.permission = _usr.permission.permission_name
2408 2408 perm_rows.append(usr)
2409 2409
 2410 2410         # filter the perm rows by 'default' first and then sort them by
 2411 2411         # admin, write, read, none permissions, sorted again alphabetically
 2412 2412         # within each group
2413 2413 perm_rows = sorted(perm_rows, key=display_sort)
2414 2414
2415 2415 _admin_perm = 'group.admin'
2416 2416 owner_row = []
2417 2417 if with_owner:
2418 2418 usr = AttributeDict(self.user.get_dict())
2419 2419 usr.owner_row = True
2420 2420 usr.permission = _admin_perm
2421 2421 owner_row.append(usr)
2422 2422
2423 2423 super_admin_rows = []
2424 2424 if with_admins:
2425 2425 for usr in User.get_all_super_admins():
2426 2426 # if this admin is also owner, don't double the record
2427 2427 if usr.user_id == owner_row[0].user_id:
2428 2428 owner_row[0].admin_row = True
2429 2429 else:
2430 2430 usr = AttributeDict(usr.get_dict())
2431 2431 usr.admin_row = True
2432 2432 usr.permission = _admin_perm
2433 2433 super_admin_rows.append(usr)
2434 2434
2435 2435 return super_admin_rows + owner_row + perm_rows
2436 2436
2437 2437 def permission_user_groups(self):
2438 2438 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2439 2439 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2440 2440 joinedload(UserGroupRepoGroupToPerm.users_group),
2441 2441 joinedload(UserGroupRepoGroupToPerm.permission),)
2442 2442
2443 2443 perm_rows = []
2444 2444 for _user_group in q.all():
2445 2445 usr = AttributeDict(_user_group.users_group.get_dict())
2446 2446 usr.permission = _user_group.permission.permission_name
2447 2447 perm_rows.append(usr)
2448 2448
2449 2449 return perm_rows
2450 2450
2451 2451 def get_api_data(self):
2452 2452 """
2453 2453 Common function for generating api data
2454 2454
2455 2455 """
2456 2456 group = self
2457 2457 data = {
2458 2458 'group_id': group.group_id,
2459 2459 'group_name': group.group_name,
2460 2460 'group_description': group.description_safe,
2461 2461 'parent_group': group.parent_group.group_name if group.parent_group else None,
2462 2462 'repositories': [x.repo_name for x in group.repositories],
2463 2463 'owner': group.user.username,
2464 2464 }
2465 2465 return data
2466 2466
2467 2467
2468 2468 class Permission(Base, BaseModel):
2469 2469 __tablename__ = 'permissions'
2470 2470 __table_args__ = (
2471 2471 Index('p_perm_name_idx', 'permission_name'),
2472 2472 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2473 2473 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2474 2474 )
2475 2475 PERMS = [
2476 2476 ('hg.admin', _('RhodeCode Super Administrator')),
2477 2477
2478 2478 ('repository.none', _('Repository no access')),
2479 2479 ('repository.read', _('Repository read access')),
2480 2480 ('repository.write', _('Repository write access')),
2481 2481 ('repository.admin', _('Repository admin access')),
2482 2482
2483 2483 ('group.none', _('Repository group no access')),
2484 2484 ('group.read', _('Repository group read access')),
2485 2485 ('group.write', _('Repository group write access')),
2486 2486 ('group.admin', _('Repository group admin access')),
2487 2487
2488 2488 ('usergroup.none', _('User group no access')),
2489 2489 ('usergroup.read', _('User group read access')),
2490 2490 ('usergroup.write', _('User group write access')),
2491 2491 ('usergroup.admin', _('User group admin access')),
2492 2492
2493 2493 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2494 2494 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2495 2495
2496 2496 ('hg.usergroup.create.false', _('User Group creation disabled')),
2497 2497 ('hg.usergroup.create.true', _('User Group creation enabled')),
2498 2498
2499 2499 ('hg.create.none', _('Repository creation disabled')),
2500 2500 ('hg.create.repository', _('Repository creation enabled')),
2501 2501 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2502 2502 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2503 2503
2504 2504 ('hg.fork.none', _('Repository forking disabled')),
2505 2505 ('hg.fork.repository', _('Repository forking enabled')),
2506 2506
2507 2507 ('hg.register.none', _('Registration disabled')),
2508 2508 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2509 2509 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2510 2510
2511 2511 ('hg.password_reset.enabled', _('Password reset enabled')),
2512 2512 ('hg.password_reset.hidden', _('Password reset hidden')),
2513 2513 ('hg.password_reset.disabled', _('Password reset disabled')),
2514 2514
2515 2515 ('hg.extern_activate.manual', _('Manual activation of external account')),
2516 2516 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2517 2517
2518 2518 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2519 2519 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2520 2520 ]
2521 2521
2522 2522 # definition of system default permissions for DEFAULT user
2523 2523 DEFAULT_USER_PERMISSIONS = [
2524 2524 'repository.read',
2525 2525 'group.read',
2526 2526 'usergroup.read',
2527 2527 'hg.create.repository',
2528 2528 'hg.repogroup.create.false',
2529 2529 'hg.usergroup.create.false',
2530 2530 'hg.create.write_on_repogroup.true',
2531 2531 'hg.fork.repository',
2532 2532 'hg.register.manual_activate',
2533 2533 'hg.password_reset.enabled',
2534 2534 'hg.extern_activate.auto',
2535 2535 'hg.inherit_default_perms.true',
2536 2536 ]
2537 2537
 2538 2538     # Weight defines which permissions are more important;
 2539 2539     # the higher the number, the more important the permission.
2541 2541 PERM_WEIGHTS = {
2542 2542 'repository.none': 0,
2543 2543 'repository.read': 1,
2544 2544 'repository.write': 3,
2545 2545 'repository.admin': 4,
2546 2546
2547 2547 'group.none': 0,
2548 2548 'group.read': 1,
2549 2549 'group.write': 3,
2550 2550 'group.admin': 4,
2551 2551
2552 2552 'usergroup.none': 0,
2553 2553 'usergroup.read': 1,
2554 2554 'usergroup.write': 3,
2555 2555 'usergroup.admin': 4,
2556 2556
2557 2557 'hg.repogroup.create.false': 0,
2558 2558 'hg.repogroup.create.true': 1,
2559 2559
2560 2560 'hg.usergroup.create.false': 0,
2561 2561 'hg.usergroup.create.true': 1,
2562 2562
2563 2563 'hg.fork.none': 0,
2564 2564 'hg.fork.repository': 1,
2565 2565 'hg.create.none': 0,
2566 2566 'hg.create.repository': 1
2567 2567 }
2568 2568
2569 2569 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2570 2570 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2571 2571 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2572 2572
2573 2573 def __unicode__(self):
2574 2574 return u"<%s('%s:%s')>" % (
2575 2575 self.__class__.__name__, self.permission_id, self.permission_name
2576 2576 )
2577 2577
2578 2578 @classmethod
2579 2579 def get_by_key(cls, key):
2580 2580 return cls.query().filter(cls.permission_name == key).scalar()
2581 2581
2582 2582 @classmethod
2583 2583 def get_default_repo_perms(cls, user_id, repo_id=None):
2584 2584 q = Session().query(UserRepoToPerm, Repository, Permission)\
2585 2585 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2586 2586 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2587 2587 .filter(UserRepoToPerm.user_id == user_id)
2588 2588 if repo_id:
2589 2589 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2590 2590 return q.all()
2591 2591
2592 2592 @classmethod
2593 2593 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2594 2594 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2595 2595 .join(
2596 2596 Permission,
2597 2597 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2598 2598 .join(
2599 2599 Repository,
2600 2600 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2601 2601 .join(
2602 2602 UserGroup,
2603 2603 UserGroupRepoToPerm.users_group_id ==
2604 2604 UserGroup.users_group_id)\
2605 2605 .join(
2606 2606 UserGroupMember,
2607 2607 UserGroupRepoToPerm.users_group_id ==
2608 2608 UserGroupMember.users_group_id)\
2609 2609 .filter(
2610 2610 UserGroupMember.user_id == user_id,
2611 2611 UserGroup.users_group_active == true())
2612 2612 if repo_id:
2613 2613 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2614 2614 return q.all()
2615 2615
2616 2616 @classmethod
2617 2617 def get_default_group_perms(cls, user_id, repo_group_id=None):
2618 2618 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2619 2619 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2620 2620 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2621 2621 .filter(UserRepoGroupToPerm.user_id == user_id)
2622 2622 if repo_group_id:
2623 2623 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2624 2624 return q.all()
2625 2625
2626 2626 @classmethod
2627 2627 def get_default_group_perms_from_user_group(
2628 2628 cls, user_id, repo_group_id=None):
2629 2629 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2630 2630 .join(
2631 2631 Permission,
2632 2632 UserGroupRepoGroupToPerm.permission_id ==
2633 2633 Permission.permission_id)\
2634 2634 .join(
2635 2635 RepoGroup,
2636 2636 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2637 2637 .join(
2638 2638 UserGroup,
2639 2639 UserGroupRepoGroupToPerm.users_group_id ==
2640 2640 UserGroup.users_group_id)\
2641 2641 .join(
2642 2642 UserGroupMember,
2643 2643 UserGroupRepoGroupToPerm.users_group_id ==
2644 2644 UserGroupMember.users_group_id)\
2645 2645 .filter(
2646 2646 UserGroupMember.user_id == user_id,
2647 2647 UserGroup.users_group_active == true())
2648 2648 if repo_group_id:
2649 2649 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2650 2650 return q.all()
2651 2651
2652 2652 @classmethod
2653 2653 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2654 2654 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2655 2655 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2656 2656 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2657 2657 .filter(UserUserGroupToPerm.user_id == user_id)
2658 2658 if user_group_id:
2659 2659 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2660 2660 return q.all()
2661 2661
2662 2662 @classmethod
2663 2663 def get_default_user_group_perms_from_user_group(
2664 2664 cls, user_id, user_group_id=None):
2665 2665 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2666 2666 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2667 2667 .join(
2668 2668 Permission,
2669 2669 UserGroupUserGroupToPerm.permission_id ==
2670 2670 Permission.permission_id)\
2671 2671 .join(
2672 2672 TargetUserGroup,
2673 2673 UserGroupUserGroupToPerm.target_user_group_id ==
2674 2674 TargetUserGroup.users_group_id)\
2675 2675 .join(
2676 2676 UserGroup,
2677 2677 UserGroupUserGroupToPerm.user_group_id ==
2678 2678 UserGroup.users_group_id)\
2679 2679 .join(
2680 2680 UserGroupMember,
2681 2681 UserGroupUserGroupToPerm.user_group_id ==
2682 2682 UserGroupMember.users_group_id)\
2683 2683 .filter(
2684 2684 UserGroupMember.user_id == user_id,
2685 2685 UserGroup.users_group_active == true())
2686 2686 if user_group_id:
2687 2687 q = q.filter(
2688 2688 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2689 2689
2690 2690 return q.all()
2691 2691
2692 2692
2693 2693 class UserRepoToPerm(Base, BaseModel):
2694 2694 __tablename__ = 'repo_to_perm'
2695 2695 __table_args__ = (
2696 2696 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2697 2697 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2698 2698 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2699 2699 )
2700 2700 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2701 2701 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2702 2702 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2703 2703 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2704 2704
2705 2705 user = relationship('User')
2706 2706 repository = relationship('Repository')
2707 2707 permission = relationship('Permission')
2708 2708
2709 2709 @classmethod
2710 2710 def create(cls, user, repository, permission):
2711 2711 n = cls()
2712 2712 n.user = user
2713 2713 n.repository = repository
2714 2714 n.permission = permission
2715 2715 Session().add(n)
2716 2716 return n
2717 2717
2718 2718 def __unicode__(self):
2719 2719 return u'<%s => %s >' % (self.user, self.repository)
2720 2720
2721 2721
2722 2722 class UserUserGroupToPerm(Base, BaseModel):
2723 2723 __tablename__ = 'user_user_group_to_perm'
2724 2724 __table_args__ = (
2725 2725 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2726 2726 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2727 2727 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2728 2728 )
2729 2729 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2730 2730 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2731 2731 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2732 2732 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2733 2733
2734 2734 user = relationship('User')
2735 2735 user_group = relationship('UserGroup')
2736 2736 permission = relationship('Permission')
2737 2737
2738 2738 @classmethod
2739 2739 def create(cls, user, user_group, permission):
2740 2740 n = cls()
2741 2741 n.user = user
2742 2742 n.user_group = user_group
2743 2743 n.permission = permission
2744 2744 Session().add(n)
2745 2745 return n
2746 2746
2747 2747 def __unicode__(self):
2748 2748 return u'<%s => %s >' % (self.user, self.user_group)
2749 2749
2750 2750
2751 2751 class UserToPerm(Base, BaseModel):
2752 2752 __tablename__ = 'user_to_perm'
2753 2753 __table_args__ = (
2754 2754 UniqueConstraint('user_id', 'permission_id'),
2755 2755 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2756 2756 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2757 2757 )
2758 2758 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2759 2759 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2760 2760 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2761 2761
2762 2762 user = relationship('User')
2763 2763 permission = relationship('Permission', lazy='joined')
2764 2764
2765 2765 def __unicode__(self):
2766 2766 return u'<%s => %s >' % (self.user, self.permission)
2767 2767
2768 2768
2769 2769 class UserGroupRepoToPerm(Base, BaseModel):
2770 2770 __tablename__ = 'users_group_repo_to_perm'
2771 2771 __table_args__ = (
2772 2772 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2773 2773 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2774 2774 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2775 2775 )
2776 2776 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2777 2777 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2778 2778 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2779 2779 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2780 2780
2781 2781 users_group = relationship('UserGroup')
2782 2782 permission = relationship('Permission')
2783 2783 repository = relationship('Repository')
2784 2784
2785 2785 @classmethod
2786 2786 def create(cls, users_group, repository, permission):
2787 2787 n = cls()
2788 2788 n.users_group = users_group
2789 2789 n.repository = repository
2790 2790 n.permission = permission
2791 2791 Session().add(n)
2792 2792 return n
2793 2793
2794 2794 def __unicode__(self):
2795 2795 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2796 2796
2797 2797
2798 2798 class UserGroupUserGroupToPerm(Base, BaseModel):
2799 2799 __tablename__ = 'user_group_user_group_to_perm'
2800 2800 __table_args__ = (
2801 2801 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2802 2802 CheckConstraint('target_user_group_id != user_group_id'),
2803 2803 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2804 2804 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2805 2805 )
2806 2806 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2807 2807 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2808 2808 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2809 2809 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2810 2810
2811 2811 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2812 2812 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2813 2813 permission = relationship('Permission')
2814 2814
2815 2815 @classmethod
2816 2816 def create(cls, target_user_group, user_group, permission):
2817 2817 n = cls()
2818 2818 n.target_user_group = target_user_group
2819 2819 n.user_group = user_group
2820 2820 n.permission = permission
2821 2821 Session().add(n)
2822 2822 return n
2823 2823
2824 2824 def __unicode__(self):
2825 2825 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
2826 2826
2827 2827
2828 2828 class UserGroupToPerm(Base, BaseModel):
2829 2829 __tablename__ = 'users_group_to_perm'
2830 2830 __table_args__ = (
2831 2831 UniqueConstraint('users_group_id', 'permission_id',),
2832 2832 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2833 2833 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2834 2834 )
2835 2835 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2836 2836 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2837 2837 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2838 2838
2839 2839 users_group = relationship('UserGroup')
2840 2840 permission = relationship('Permission')
2841 2841
2842 2842
2843 2843 class UserRepoGroupToPerm(Base, BaseModel):
2844 2844 __tablename__ = 'user_repo_group_to_perm'
2845 2845 __table_args__ = (
2846 2846 UniqueConstraint('user_id', 'group_id', 'permission_id'),
2847 2847 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2848 2848 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2849 2849 )
2850 2850
2851 2851 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2852 2852 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2853 2853 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2854 2854 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2855 2855
2856 2856 user = relationship('User')
2857 2857 group = relationship('RepoGroup')
2858 2858 permission = relationship('Permission')
2859 2859
2860 2860 @classmethod
2861 2861 def create(cls, user, repository_group, permission):
2862 2862 n = cls()
2863 2863 n.user = user
2864 2864 n.group = repository_group
2865 2865 n.permission = permission
2866 2866 Session().add(n)
2867 2867 return n
2868 2868
2869 2869
2870 2870 class UserGroupRepoGroupToPerm(Base, BaseModel):
2871 2871 __tablename__ = 'users_group_repo_group_to_perm'
2872 2872 __table_args__ = (
2873 2873 UniqueConstraint('users_group_id', 'group_id'),
2874 2874 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2875 2875 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2876 2876 )
2877 2877
2878 2878 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2879 2879 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2880 2880 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2881 2881 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2882 2882
2883 2883 users_group = relationship('UserGroup')
2884 2884 permission = relationship('Permission')
2885 2885 group = relationship('RepoGroup')
2886 2886
2887 2887 @classmethod
2888 2888 def create(cls, user_group, repository_group, permission):
2889 2889 n = cls()
2890 2890 n.users_group = user_group
2891 2891 n.group = repository_group
2892 2892 n.permission = permission
2893 2893 Session().add(n)
2894 2894 return n
2895 2895
2896 2896 def __unicode__(self):
2897 2897 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
2898 2898
2899 2899
2900 2900 class Statistics(Base, BaseModel):
2901 2901 __tablename__ = 'statistics'
2902 2902 __table_args__ = (
2903 2903 UniqueConstraint('repository_id'),
2904 2904 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2905 2905 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2906 2906 )
2907 2907 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2908 2908 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
2909 2909 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
 2910 2910     commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
 2911 2911     commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
 2912 2912     languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
2913 2913
2914 2914 repository = relationship('Repository', single_parent=True)
2915 2915
2916 2916
2917 2917 class UserFollowing(Base, BaseModel):
2918 2918 __tablename__ = 'user_followings'
2919 2919 __table_args__ = (
2920 2920 UniqueConstraint('user_id', 'follows_repository_id'),
2921 2921 UniqueConstraint('user_id', 'follows_user_id'),
2922 2922 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2923 2923 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2924 2924 )
2925 2925
2926 2926 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2927 2927 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2928 2928 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
2929 2929 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
2930 2930 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2931 2931
2932 2932 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
2933 2933
2934 2934 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
2935 2935 follows_repository = relationship('Repository', order_by='Repository.repo_name')
2936 2936
2937 2937 @classmethod
2938 2938 def get_repo_followers(cls, repo_id):
2939 2939 return cls.query().filter(cls.follows_repo_id == repo_id)
2940 2940
2941 2941
2942 2942 class CacheKey(Base, BaseModel):
2943 2943 __tablename__ = 'cache_invalidation'
2944 2944 __table_args__ = (
2945 2945 UniqueConstraint('cache_key'),
2946 2946 Index('key_idx', 'cache_key'),
2947 2947 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2948 2948 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2949 2949 )
2950 2950 CACHE_TYPE_ATOM = 'ATOM'
2951 2951 CACHE_TYPE_RSS = 'RSS'
2952 2952 CACHE_TYPE_README = 'README'
2953 2953
2954 2954 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2955 2955 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
2956 2956 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
2957 2957 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
2958 2958
2959 2959 def __init__(self, cache_key, cache_args=''):
2960 2960 self.cache_key = cache_key
2961 2961 self.cache_args = cache_args
2962 2962 self.cache_active = False
2963 2963
2964 2964 def __unicode__(self):
2965 2965 return u"<%s('%s:%s[%s]')>" % (
2966 2966 self.__class__.__name__,
2967 2967 self.cache_id, self.cache_key, self.cache_active)
2968 2968
2969 2969 def _cache_key_partition(self):
2970 2970 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
2971 2971 return prefix, repo_name, suffix
2972 2972
2973 2973 def get_prefix(self):
2974 2974 """
2975 2975 Try to extract prefix from existing cache key. The key could consist
2976 2976 of prefix, repo_name, suffix
2977 2977 """
2978 2978 # this returns prefix, repo_name, suffix
2979 2979 return self._cache_key_partition()[0]
2980 2980
2981 2981 def get_suffix(self):
2982 2982 """
2983 2983 get suffix that might have been used in _get_cache_key to
2984 2984 generate self.cache_key. Only used for informational purposes
2985 2985 in repo_edit.mako.
2986 2986 """
2987 2987 # prefix, repo_name, suffix
2988 2988 return self._cache_key_partition()[2]
2989 2989
2990 2990 @classmethod
2991 2991 def delete_all_cache(cls):
2992 2992 """
2993 2993 Delete all cache keys from database.
2994 2994 Should only be run when all instances are down and all entries
2995 2995 thus stale.
2996 2996 """
2997 2997 cls.query().delete()
2998 2998 Session().commit()
2999 2999
3000 3000 @classmethod
3001 3001 def get_cache_key(cls, repo_name, cache_type):
3002 3002 """
3003 3003
3004 3004 Generate a cache key for this process of RhodeCode instance.
3005 3005 Prefix most likely will be process id or maybe explicitly set
3006 3006 instance_id from .ini file.
3007 3007 """
3008 3008 import rhodecode
3009 3009 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
3010 3010
3011 3011 repo_as_unicode = safe_unicode(repo_name)
3012 3012 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
3013 3013 if cache_type else repo_as_unicode
3014 3014
3015 3015 return u'{}{}'.format(prefix, key)
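    # Editor's note: assuming an empty instance_id prefix, the key generated
    # above for repo 'foo/bar' and cache_type CACHE_TYPE_README would be
    # u'foo/bar_README'; with cache_type=None it is just u'foo/bar'.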
3016 3016
3017 3017 @classmethod
3018 3018 def set_invalidate(cls, repo_name, delete=False):
3019 3019 """
3020 3020 Mark all caches of a repo as invalid in the database.
3021 3021 """
3022 3022
3023 3023 try:
3024 3024 qry = Session().query(cls).filter(cls.cache_args == repo_name)
3025 3025 if delete:
3026 3026 log.debug('cache objects deleted for repo %s',
3027 3027 safe_str(repo_name))
3028 3028 qry.delete()
3029 3029 else:
3030 3030 log.debug('cache objects marked as invalid for repo %s',
3031 3031 safe_str(repo_name))
3032 3032 qry.update({"cache_active": False})
3033 3033
3034 3034 Session().commit()
3035 3035 except Exception:
3036 3036 log.exception(
3037 3037 'Cache key invalidation failed for repository %s',
3038 3038 safe_str(repo_name))
3039 3039 Session().rollback()
3040 3040
3041 3041 @classmethod
3042 3042 def get_active_cache(cls, cache_key):
3043 3043 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3044 3044 if inv_obj:
3045 3045 return inv_obj
3046 3046 return None
3047 3047
3048 3048 @classmethod
3049 3049 def repo_context_cache(cls, compute_func, repo_name, cache_type,
3050 3050 thread_scoped=False):
3051 3051 """
3052 3052 @cache_region('long_term')
3053 3053 def _heavy_calculation(cache_key):
3054 3054 return 'result'
3055 3055
3056 3056 cache_context = CacheKey.repo_context_cache(
3057 3057 _heavy_calculation, repo_name, cache_type)
3058 3058
3059 3059 with cache_context as context:
3060 3060 context.invalidate()
3061 3061 computed = context.compute()
3062 3062
3063 3063 assert computed == 'result'
3064 3064 """
3065 3065 from rhodecode.lib import caches
3066 3066 return caches.InvalidationContext(
3067 3067 compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
3068 3068
3069 3069
3070 3070 class ChangesetComment(Base, BaseModel):
3071 3071 __tablename__ = 'changeset_comments'
3072 3072 __table_args__ = (
3073 3073 Index('cc_revision_idx', 'revision'),
3074 3074 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3075 3075 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3076 3076 )
3077 3077
3078 3078 COMMENT_OUTDATED = u'comment_outdated'
3079 3079 COMMENT_TYPE_NOTE = u'note'
3080 3080 COMMENT_TYPE_TODO = u'todo'
3081 3081 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3082 3082
3083 3083 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3084 3084 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3085 3085 revision = Column('revision', String(40), nullable=True)
3086 3086 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3087 3087 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3088 3088 line_no = Column('line_no', Unicode(10), nullable=True)
3089 3089 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3090 3090 f_path = Column('f_path', Unicode(1000), nullable=True)
3091 3091 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3092 3092 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3093 3093 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3094 3094 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3095 3095 renderer = Column('renderer', Unicode(64), nullable=True)
3096 3096 display_state = Column('display_state', Unicode(128), nullable=True)
3097 3097
3098 3098 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3099 3099 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3100 3100 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3101 3101 author = relationship('User', lazy='joined')
3102 3102 repo = relationship('Repository')
3103 3103 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3104 3104 pull_request = relationship('PullRequest', lazy='joined')
3105 3105 pull_request_version = relationship('PullRequestVersion')
3106 3106
3107 3107 @classmethod
3108 3108 def get_users(cls, revision=None, pull_request_id=None):
3109 3109 """
 3110 3110         Returns the users associated with this ChangesetComment, i.e. those
 3111 3111         who actually commented
3112 3112
3113 3113 :param cls:
3114 3114 :param revision:
3115 3115 """
3116 3116 q = Session().query(User)\
3117 3117 .join(ChangesetComment.author)
3118 3118 if revision:
3119 3119 q = q.filter(cls.revision == revision)
3120 3120 elif pull_request_id:
3121 3121 q = q.filter(cls.pull_request_id == pull_request_id)
3122 3122 return q.all()
3123 3123
3124 3124 @classmethod
3125 3125 def get_index_from_version(cls, pr_version, versions):
3126 3126 num_versions = [x.pull_request_version_id for x in versions]
3127 3127 try:
 3128 3128             return num_versions.index(pr_version) + 1
3129 3129 except (IndexError, ValueError):
3130 3130 return
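    # Editor's note: the returned index is 1-based, e.g. if the versions have
    # pull_request_version_id values [10, 11, 12] then get_index_from_version(11,
    # versions) returns 2; an unknown version falls into the except and returns None.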
3131 3131
3132 3132 @property
3133 3133 def outdated(self):
3134 3134 return self.display_state == self.COMMENT_OUTDATED
3135 3135
3136 3136 def outdated_at_version(self, version):
3137 3137 """
3138 3138 Checks if comment is outdated for given pull request version
3139 3139 """
3140 3140 return self.outdated and self.pull_request_version_id != version
3141 3141
3142 3142 def older_than_version(self, version):
3143 3143 """
 3144 3144         Checks if the comment was made in an earlier version than the given one
3145 3145 """
3146 3146 if version is None:
3147 3147 return self.pull_request_version_id is not None
3148 3148
3149 3149 return self.pull_request_version_id < version
3150 3150
3151 3151 @property
3152 3152 def resolved(self):
3153 3153 return self.resolved_by[0] if self.resolved_by else None
3154 3154
3155 3155 @property
3156 3156 def is_todo(self):
3157 3157 return self.comment_type == self.COMMENT_TYPE_TODO
3158 3158
3159 3159 @property
3160 3160 def is_inline(self):
3161 3161 return self.line_no and self.f_path
3162 3162
3163 3163 def get_index_version(self, versions):
3164 3164 return self.get_index_from_version(
3165 3165 self.pull_request_version_id, versions)
3166 3166
3167 3167 def __repr__(self):
3168 3168 if self.comment_id:
3169 3169 return '<DB:Comment #%s>' % self.comment_id
3170 3170 else:
3171 3171 return '<DB:Comment at %#x>' % id(self)
3172 3172
3173 3173 def get_api_data(self):
3174 3174 comment = self
3175 3175 data = {
3176 3176 'comment_id': comment.comment_id,
3177 3177 'comment_type': comment.comment_type,
3178 3178 'comment_text': comment.text,
3179 3179 'comment_status': comment.status_change,
3180 3180 'comment_f_path': comment.f_path,
3181 3181 'comment_lineno': comment.line_no,
3182 3182 'comment_author': comment.author,
3183 3183 'comment_created_on': comment.created_on
3184 3184 }
3185 3185 return data
3186 3186
3187 3187 def __json__(self):
3188 3188 data = dict()
3189 3189 data.update(self.get_api_data())
3190 3190 return data
3191 3191
3192 3192
3193 3193 class ChangesetStatus(Base, BaseModel):
3194 3194 __tablename__ = 'changeset_statuses'
3195 3195 __table_args__ = (
3196 3196 Index('cs_revision_idx', 'revision'),
3197 3197 Index('cs_version_idx', 'version'),
3198 3198 UniqueConstraint('repo_id', 'revision', 'version'),
3199 3199 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3200 3200 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3201 3201 )
3202 3202 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3203 3203 STATUS_APPROVED = 'approved'
3204 3204 STATUS_REJECTED = 'rejected'
3205 3205 STATUS_UNDER_REVIEW = 'under_review'
3206 3206
3207 3207 STATUSES = [
3208 3208 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3209 3209 (STATUS_APPROVED, _("Approved")),
3210 3210 (STATUS_REJECTED, _("Rejected")),
3211 3211 (STATUS_UNDER_REVIEW, _("Under Review")),
3212 3212 ]
3213 3213
3214 3214 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3215 3215 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3216 3216 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3217 3217 revision = Column('revision', String(40), nullable=False)
3218 3218 status = Column('status', String(128), nullable=False, default=DEFAULT)
3219 3219 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3220 3220 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3221 3221 version = Column('version', Integer(), nullable=False, default=0)
3222 3222 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3223 3223
3224 3224 author = relationship('User', lazy='joined')
3225 3225 repo = relationship('Repository')
3226 3226 comment = relationship('ChangesetComment', lazy='joined')
3227 3227 pull_request = relationship('PullRequest', lazy='joined')
3228 3228
3229 3229 def __unicode__(self):
3230 3230 return u"<%s('%s[v%s]:%s')>" % (
3231 3231 self.__class__.__name__,
3232 3232 self.status, self.version, self.author
3233 3233 )
3234 3234
3235 3235 @classmethod
3236 3236 def get_status_lbl(cls, value):
3237 3237 return dict(cls.STATUSES).get(value)
3238 3238
3239 3239 @property
3240 3240 def status_lbl(self):
3241 3241 return ChangesetStatus.get_status_lbl(self.status)
3242 3242
3243 3243 def get_api_data(self):
3244 3244 status = self
3245 3245 data = {
3246 3246 'status_id': status.changeset_status_id,
3247 3247 'status': status.status,
3248 3248 }
3249 3249 return data
3250 3250
3251 3251 def __json__(self):
3252 3252 data = dict()
3253 3253 data.update(self.get_api_data())
3254 3254 return data
3255 3255
3256 3256
3257 3257 class _PullRequestBase(BaseModel):
3258 3258 """
3259 3259 Common attributes of pull request and version entries.
3260 3260 """
3261 3261
3262 3262 # .status values
3263 3263 STATUS_NEW = u'new'
3264 3264 STATUS_OPEN = u'open'
3265 3265 STATUS_CLOSED = u'closed'
3266 3266
3267 3267 title = Column('title', Unicode(255), nullable=True)
3268 3268 description = Column(
3269 3269 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3270 3270 nullable=True)
3271 3271 # new/open/closed status of pull request (not approve/reject/etc)
3272 3272 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3273 3273 created_on = Column(
3274 3274 'created_on', DateTime(timezone=False), nullable=False,
3275 3275 default=datetime.datetime.now)
3276 3276 updated_on = Column(
3277 3277 'updated_on', DateTime(timezone=False), nullable=False,
3278 3278 default=datetime.datetime.now)
3279 3279
3280 3280 @declared_attr
3281 3281 def user_id(cls):
3282 3282 return Column(
3283 3283 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3284 3284 unique=None)
3285 3285
3286 3286 # 500 revisions max
3287 3287 _revisions = Column(
3288 3288 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3289 3289
3290 3290 @declared_attr
3291 3291 def source_repo_id(cls):
3292 3292 # TODO: dan: rename column to source_repo_id
3293 3293 return Column(
3294 3294 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3295 3295 nullable=False)
3296 3296
3297 3297 source_ref = Column('org_ref', Unicode(255), nullable=False)
3298 3298
3299 3299 @declared_attr
3300 3300 def target_repo_id(cls):
3301 3301 # TODO: dan: rename column to target_repo_id
3302 3302 return Column(
3303 3303 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3304 3304 nullable=False)
3305 3305
3306 3306 target_ref = Column('other_ref', Unicode(255), nullable=False)
3307 3307 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3308 3308
3309 3309 # TODO: dan: rename column to last_merge_source_rev
3310 3310 _last_merge_source_rev = Column(
3311 3311 'last_merge_org_rev', String(40), nullable=True)
3312 3312 # TODO: dan: rename column to last_merge_target_rev
3313 3313 _last_merge_target_rev = Column(
3314 3314 'last_merge_other_rev', String(40), nullable=True)
3315 3315 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3316 3316 merge_rev = Column('merge_rev', String(40), nullable=True)
3317 3317
3318 3318 reviewer_data = Column(
3319 3319 'reviewer_data_json', MutationObj.as_mutable(
3320 3320 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3321 3321
3322 3322 @property
3323 3323 def reviewer_data_json(self):
3324 3324 return json.dumps(self.reviewer_data)
3325 3325
3326 3326 @hybrid_property
3327 3327 def description_safe(self):
3328 3328 from rhodecode.lib import helpers as h
3329 3329 return h.escape(self.description)
3330 3330
3331 3331 @hybrid_property
3332 3332 def revisions(self):
3333 3333 return self._revisions.split(':') if self._revisions else []
3334 3334
3335 3335 @revisions.setter
3336 3336 def revisions(self, val):
3337 3337 self._revisions = ':'.join(val)
3338 3338
3339 3339 @declared_attr
3340 3340 def author(cls):
3341 3341 return relationship('User', lazy='joined')
3342 3342
3343 3343 @declared_attr
3344 3344 def source_repo(cls):
3345 3345 return relationship(
3346 3346 'Repository',
3347 3347 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3348 3348
3349 3349 @property
3350 3350 def source_ref_parts(self):
3351 3351 return self.unicode_to_reference(self.source_ref)
3352 3352
3353 3353 @declared_attr
3354 3354 def target_repo(cls):
3355 3355 return relationship(
3356 3356 'Repository',
3357 3357 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3358 3358
3359 3359 @property
3360 3360 def target_ref_parts(self):
3361 3361 return self.unicode_to_reference(self.target_ref)
3362 3362
3363 3363 @property
3364 3364 def shadow_merge_ref(self):
3365 3365 return self.unicode_to_reference(self._shadow_merge_ref)
3366 3366
3367 3367 @shadow_merge_ref.setter
3368 3368 def shadow_merge_ref(self, ref):
3369 3369 self._shadow_merge_ref = self.reference_to_unicode(ref)
3370 3370
3371 3371 def unicode_to_reference(self, raw):
3372 3372 """
3373 3373 Convert a unicode (or string) to a reference object.
3374 3374         If the value evaluates to False, it returns None.
3375 3375 """
3376 3376 if raw:
3377 3377 refs = raw.split(':')
3378 3378 return Reference(*refs)
3379 3379 else:
3380 3380 return None
3381 3381
3382 3382 def reference_to_unicode(self, ref):
3383 3383 """
3384 3384 Convert a reference object to unicode.
3385 3385 If reference is None it returns None.
3386 3386 """
3387 3387 if ref:
3388 3388 return u':'.join(ref)
3389 3389 else:
3390 3390 return None
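    # Editor's note (illustrative, not from the original source): the
    # serialized form is assumed to be "type:name:commit_id", e.g.
    #   unicode_to_reference(u'branch:default:abc123')
    #       -> Reference('branch', 'default', 'abc123')
    #   reference_to_unicode(Reference('branch', 'default', 'abc123'))
    #       -> u'branch:default:abc123'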
3391 3391
3392 3392 def get_api_data(self, with_merge_state=True):
3393 3393 from rhodecode.model.pull_request import PullRequestModel
3394 3394
3395 3395 pull_request = self
3396 3396 if with_merge_state:
3397 3397 merge_status = PullRequestModel().merge_status(pull_request)
3398 3398 merge_state = {
3399 3399 'status': merge_status[0],
3400 3400 'message': safe_unicode(merge_status[1]),
3401 3401 }
3402 3402 else:
3403 3403 merge_state = {'status': 'not_available',
3404 3404 'message': 'not_available'}
3405 3405
3406 3406 merge_data = {
3407 3407 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3408 3408 'reference': (
3409 3409 pull_request.shadow_merge_ref._asdict()
3410 3410 if pull_request.shadow_merge_ref else None),
3411 3411 }
3412 3412
3413 3413 data = {
3414 3414 'pull_request_id': pull_request.pull_request_id,
3415 3415 'url': PullRequestModel().get_url(pull_request),
3416 3416 'title': pull_request.title,
3417 3417 'description': pull_request.description,
3418 3418 'status': pull_request.status,
3419 3419 'created_on': pull_request.created_on,
3420 3420 'updated_on': pull_request.updated_on,
3421 3421 'commit_ids': pull_request.revisions,
3422 3422 'review_status': pull_request.calculated_review_status(),
3423 3423 'mergeable': merge_state,
3424 3424 'source': {
3425 3425 'clone_url': pull_request.source_repo.clone_url(),
3426 3426 'repository': pull_request.source_repo.repo_name,
3427 3427 'reference': {
3428 3428 'name': pull_request.source_ref_parts.name,
3429 3429 'type': pull_request.source_ref_parts.type,
3430 3430 'commit_id': pull_request.source_ref_parts.commit_id,
3431 3431 },
3432 3432 },
3433 3433 'target': {
3434 3434 'clone_url': pull_request.target_repo.clone_url(),
3435 3435 'repository': pull_request.target_repo.repo_name,
3436 3436 'reference': {
3437 3437 'name': pull_request.target_ref_parts.name,
3438 3438 'type': pull_request.target_ref_parts.type,
3439 3439 'commit_id': pull_request.target_ref_parts.commit_id,
3440 3440 },
3441 3441 },
3442 3442 'merge': merge_data,
3443 3443 'author': pull_request.author.get_api_data(include_secrets=False,
3444 3444 details='basic'),
3445 3445 'reviewers': [
3446 3446 {
3447 3447 'user': reviewer.get_api_data(include_secrets=False,
3448 3448 details='basic'),
3449 3449 'reasons': reasons,
3450 3450 'review_status': st[0][1].status if st else 'not_reviewed',
3451 3451 }
3452 3452 for reviewer, reasons, mandatory, st in
3453 3453 pull_request.reviewers_statuses()
3454 3454 ]
3455 3455 }
3456 3456
3457 3457 return data
3458 3458
3459 3459
3460 3460 class PullRequest(Base, _PullRequestBase):
3461 3461 __tablename__ = 'pull_requests'
3462 3462 __table_args__ = (
3463 3463 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3464 3464 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3465 3465 )
3466 3466
3467 3467 pull_request_id = Column(
3468 3468 'pull_request_id', Integer(), nullable=False, primary_key=True)
3469 3469
3470 3470 def __repr__(self):
3471 3471 if self.pull_request_id:
3472 3472 return '<DB:PullRequest #%s>' % self.pull_request_id
3473 3473 else:
3474 3474 return '<DB:PullRequest at %#x>' % id(self)
3475 3475
3476 3476 reviewers = relationship('PullRequestReviewers',
3477 3477 cascade="all, delete, delete-orphan")
3478 3478 statuses = relationship('ChangesetStatus',
3479 3479 cascade="all, delete, delete-orphan")
3480 3480 comments = relationship('ChangesetComment',
3481 3481 cascade="all, delete, delete-orphan")
3482 3482 versions = relationship('PullRequestVersion',
3483 3483 cascade="all, delete, delete-orphan",
3484 3484 lazy='dynamic')
3485 3485
3486 3486 @classmethod
3487 3487 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3488 3488 internal_methods=None):
3489 3489
3490 3490 class PullRequestDisplay(object):
3491 3491 """
3492 3492 Special object wrapper for showing PullRequest data via Versions
3493 3493             It mimics the PR object as closely as possible. This is a
3494 3494             read-only object used for display only
3495 3495 """
3496 3496
3497 3497 def __init__(self, attrs, internal=None):
3498 3498 self.attrs = attrs
3499 3499                 # internal names take priority over the ones given via attrs
3500 3500 self.internal = internal or ['versions']
3501 3501
3502 3502 def __getattr__(self, item):
3503 3503 if item in self.internal:
3504 3504 return getattr(self, item)
3505 3505 try:
3506 3506 return self.attrs[item]
3507 3507 except KeyError:
3508 3508 raise AttributeError(
3509 3509 '%s object has no attribute %s' % (self, item))
3510 3510
3511 3511 def __repr__(self):
3512 3512 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3513 3513
3514 3514 def versions(self):
3515 3515 return pull_request_obj.versions.order_by(
3516 3516 PullRequestVersion.pull_request_version_id).all()
3517 3517
3518 3518 def is_closed(self):
3519 3519 return pull_request_obj.is_closed()
3520 3520
3521 3521 @property
3522 3522 def pull_request_version_id(self):
3523 3523 return getattr(pull_request_obj, 'pull_request_version_id', None)
3524 3524
3525 3525 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3526 3526
3527 3527 attrs.author = StrictAttributeDict(
3528 3528 pull_request_obj.author.get_api_data())
3529 3529 if pull_request_obj.target_repo:
3530 3530 attrs.target_repo = StrictAttributeDict(
3531 3531 pull_request_obj.target_repo.get_api_data())
3532 3532 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3533 3533
3534 3534 if pull_request_obj.source_repo:
3535 3535 attrs.source_repo = StrictAttributeDict(
3536 3536 pull_request_obj.source_repo.get_api_data())
3537 3537 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3538 3538
3539 3539 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3540 3540 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3541 3541 attrs.revisions = pull_request_obj.revisions
3542 3542
3543 3543 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3544 3544 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3545 3545 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3546 3546
3547 3547 return PullRequestDisplay(attrs, internal=internal_methods)
3548 3548
3549 3549 def is_closed(self):
3550 3550 return self.status == self.STATUS_CLOSED
3551 3551
3552 3552 def __json__(self):
3553 3553 return {
3554 3554 'revisions': self.revisions,
3555 3555 }
3556 3556
3557 3557 def calculated_review_status(self):
3558 3558 from rhodecode.model.changeset_status import ChangesetStatusModel
3559 3559 return ChangesetStatusModel().calculated_review_status(self)
3560 3560
3561 3561 def reviewers_statuses(self):
3562 3562 from rhodecode.model.changeset_status import ChangesetStatusModel
3563 3563 return ChangesetStatusModel().reviewers_statuses(self)
3564 3564
3565 3565 @property
3566 3566 def workspace_id(self):
3567 3567 from rhodecode.model.pull_request import PullRequestModel
3568 3568 return PullRequestModel()._workspace_id(self)
3569 3569
3570 3570 def get_shadow_repo(self):
3571 3571 workspace_id = self.workspace_id
3572 3572 vcs_obj = self.target_repo.scm_instance()
3573 3573 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3574 3574 workspace_id)
3575 3575 return vcs_obj._get_shadow_instance(shadow_repository_path)
3576 3576
3577 3577
3578 3578 class PullRequestVersion(Base, _PullRequestBase):
3579 3579 __tablename__ = 'pull_request_versions'
3580 3580 __table_args__ = (
3581 3581 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3582 3582 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3583 3583 )
3584 3584
3585 3585 pull_request_version_id = Column(
3586 3586 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3587 3587 pull_request_id = Column(
3588 3588 'pull_request_id', Integer(),
3589 3589 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3590 3590 pull_request = relationship('PullRequest')
3591 3591
3592 3592 def __repr__(self):
3593 3593 if self.pull_request_version_id:
3594 3594 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3595 3595 else:
3596 3596 return '<DB:PullRequestVersion at %#x>' % id(self)
3597 3597
3598 3598 @property
3599 3599 def reviewers(self):
3600 3600 return self.pull_request.reviewers
3601 3601
3602 3602 @property
3603 3603 def versions(self):
3604 3604 return self.pull_request.versions
3605 3605
3606 3606 def is_closed(self):
3607 3607 # calculate from original
3608 3608 return self.pull_request.status == self.STATUS_CLOSED
3609 3609
3610 3610 def calculated_review_status(self):
3611 3611 return self.pull_request.calculated_review_status()
3612 3612
3613 3613 def reviewers_statuses(self):
3614 3614 return self.pull_request.reviewers_statuses()
3615 3615
3616 3616
3617 3617 class PullRequestReviewers(Base, BaseModel):
3618 3618 __tablename__ = 'pull_request_reviewers'
3619 3619 __table_args__ = (
3620 3620 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3621 3621 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3622 3622 )
3623 3623
3624 3624 @hybrid_property
3625 3625 def reasons(self):
3626 3626 if not self._reasons:
3627 3627 return []
3628 3628 return self._reasons
3629 3629
3630 3630 @reasons.setter
3631 3631 def reasons(self, val):
3632 3632 val = val or []
3633 3633 if any(not isinstance(x, basestring) for x in val):
3634 3634 raise Exception('invalid reasons type, must be list of strings')
3635 3635 self._reasons = val
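    # Editor's note (usage sketch, `reviewer` is a hypothetical
    # PullRequestReviewers instance):
    #   reviewer.reasons = [u'repo owner', u'matched review rule']  # accepted
    #   reviewer.reasons = [42]  # raises Exception: invalid reasons type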
3636 3636
3637 3637 pull_requests_reviewers_id = Column(
3638 3638 'pull_requests_reviewers_id', Integer(), nullable=False,
3639 3639 primary_key=True)
3640 3640 pull_request_id = Column(
3641 3641 "pull_request_id", Integer(),
3642 3642 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3643 3643 user_id = Column(
3644 3644 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3645 3645 _reasons = Column(
3646 3646 'reason', MutationList.as_mutable(
3647 3647 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3648 3648 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3649 3649 user = relationship('User')
3650 3650 pull_request = relationship('PullRequest')
3651 3651
3652 3652
3653 3653 class Notification(Base, BaseModel):
3654 3654 __tablename__ = 'notifications'
3655 3655 __table_args__ = (
3656 3656 Index('notification_type_idx', 'type'),
3657 3657 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3658 3658 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3659 3659 )
3660 3660
3661 3661 TYPE_CHANGESET_COMMENT = u'cs_comment'
3662 3662 TYPE_MESSAGE = u'message'
3663 3663 TYPE_MENTION = u'mention'
3664 3664 TYPE_REGISTRATION = u'registration'
3665 3665 TYPE_PULL_REQUEST = u'pull_request'
3666 3666 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3667 3667
3668 3668 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3669 3669 subject = Column('subject', Unicode(512), nullable=True)
3670 3670 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3671 3671 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3672 3672 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3673 3673 type_ = Column('type', Unicode(255))
3674 3674
3675 3675 created_by_user = relationship('User')
3676 3676 notifications_to_users = relationship('UserNotification', lazy='joined',
3677 3677 cascade="all, delete, delete-orphan")
3678 3678
3679 3679 @property
3680 3680 def recipients(self):
3681 3681 return [x.user for x in UserNotification.query()\
3682 3682 .filter(UserNotification.notification == self)\
3683 3683 .order_by(UserNotification.user_id.asc()).all()]
3684 3684
3685 3685 @classmethod
3686 3686 def create(cls, created_by, subject, body, recipients, type_=None):
3687 3687 if type_ is None:
3688 3688 type_ = Notification.TYPE_MESSAGE
3689 3689
3690 3690 notification = cls()
3691 3691 notification.created_by_user = created_by
3692 3692 notification.subject = subject
3693 3693 notification.body = body
3694 3694 notification.type_ = type_
3695 3695 notification.created_on = datetime.datetime.now()
3696 3696
3697 3697 for u in recipients:
3698 3698 assoc = UserNotification()
3699 3699 assoc.notification = notification
3700 3700
3701 3701 # if created_by is inside recipients mark his notification
3702 3702 # as read
3703 3703 if u.user_id == created_by.user_id:
3704 3704 assoc.read = True
3705 3705
3706 3706 u.notifications.append(assoc)
3707 3707 Session().add(notification)
3708 3708
3709 3709 return notification
3710 3710
3711 3711 @property
3712 3712 def description(self):
3713 3713 from rhodecode.model.notification import NotificationModel
3714 3714 return NotificationModel().make_description(self)
3715 3715
3716 3716
3717 3717 class UserNotification(Base, BaseModel):
3718 3718 __tablename__ = 'user_to_notification'
3719 3719 __table_args__ = (
3720 3720 UniqueConstraint('user_id', 'notification_id'),
3721 3721 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3722 3722 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3723 3723 )
3724 3724 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3725 3725 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3726 3726 read = Column('read', Boolean, default=False)
3727 3727 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3728 3728
3729 3729 user = relationship('User', lazy="joined")
3730 3730 notification = relationship('Notification', lazy="joined",
3731 3731 order_by=lambda: Notification.created_on.desc(),)
3732 3732
3733 3733 def mark_as_read(self):
3734 3734 self.read = True
3735 3735 Session().add(self)
3736 3736
3737 3737
3738 3738 class Gist(Base, BaseModel):
3739 3739 __tablename__ = 'gists'
3740 3740 __table_args__ = (
3741 3741 Index('g_gist_access_id_idx', 'gist_access_id'),
3742 3742 Index('g_created_on_idx', 'created_on'),
3743 3743 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3744 3744 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3745 3745 )
3746 3746 GIST_PUBLIC = u'public'
3747 3747 GIST_PRIVATE = u'private'
3748 3748 DEFAULT_FILENAME = u'gistfile1.txt'
3749 3749
3750 3750 ACL_LEVEL_PUBLIC = u'acl_public'
3751 3751 ACL_LEVEL_PRIVATE = u'acl_private'
3752 3752
3753 3753 gist_id = Column('gist_id', Integer(), primary_key=True)
3754 3754 gist_access_id = Column('gist_access_id', Unicode(250))
3755 3755 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3756 3756 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3757 3757 gist_expires = Column('gist_expires', Float(53), nullable=False)
3758 3758 gist_type = Column('gist_type', Unicode(128), nullable=False)
3759 3759 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3760 3760 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3761 3761 acl_level = Column('acl_level', Unicode(128), nullable=True)
3762 3762
3763 3763 owner = relationship('User')
3764 3764
3765 3765 def __repr__(self):
3766 3766 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3767 3767
3768 3768 @hybrid_property
3769 3769 def description_safe(self):
3770 3770 from rhodecode.lib import helpers as h
3771 3771 return h.escape(self.gist_description)
3772 3772
3773 3773 @classmethod
3774 3774 def get_or_404(cls, id_, pyramid_exc=False):
3775 3775
3776 3776 if pyramid_exc:
3777 3777 from pyramid.httpexceptions import HTTPNotFound
3778 3778 else:
3779 3779 from webob.exc import HTTPNotFound
3780 3780
3781 3781 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3782 3782 if not res:
3783 3783 raise HTTPNotFound
3784 3784 return res
3785 3785
3786 3786 @classmethod
3787 3787 def get_by_access_id(cls, gist_access_id):
3788 3788 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3789 3789
3790 3790 def gist_url(self):
3791 3791 from rhodecode.model.gist import GistModel
3792 3792 return GistModel().get_url(self)
3793 3793
3794 3794 @classmethod
3795 3795 def base_path(cls):
3796 3796 """
3797 3797         Returns the base path where all gists are stored
3798 3798
3799 3799 :param cls:
3800 3800 """
3801 3801 from rhodecode.model.gist import GIST_STORE_LOC
3802 3802 q = Session().query(RhodeCodeUi)\
3803 3803 .filter(RhodeCodeUi.ui_key == URL_SEP)
3804 3804 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3805 3805 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3806 3806
3807 3807 def get_api_data(self):
3808 3808 """
3809 3809 Common function for generating gist related data for API
3810 3810 """
3811 3811 gist = self
3812 3812 data = {
3813 3813 'gist_id': gist.gist_id,
3814 3814 'type': gist.gist_type,
3815 3815 'access_id': gist.gist_access_id,
3816 3816 'description': gist.gist_description,
3817 3817 'url': gist.gist_url(),
3818 3818 'expires': gist.gist_expires,
3819 3819 'created_on': gist.created_on,
3820 3820 'modified_at': gist.modified_at,
3821 3821 'content': None,
3822 3822 'acl_level': gist.acl_level,
3823 3823 }
3824 3824 return data
3825 3825
3826 3826 def __json__(self):
3827 3827 data = dict(
3828 3828 )
3829 3829 data.update(self.get_api_data())
3830 3830 return data
3831 3831 # SCM functions
3832 3832
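    # Editor's note: returns a VCS backend instance for the small repository
    # that backs this gist, located under base_path()/gist_access_id.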
3833 3833 def scm_instance(self, **kwargs):
3834 3834 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
3835 3835 return get_vcs_instance(
3836 3836 repo_path=safe_str(full_repo_path), create=False)
3837 3837
3838 3838
3839 3839 class ExternalIdentity(Base, BaseModel):
3840 3840 __tablename__ = 'external_identities'
3841 3841 __table_args__ = (
3842 3842 Index('local_user_id_idx', 'local_user_id'),
3843 3843 Index('external_id_idx', 'external_id'),
3844 3844 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3845 3845 'mysql_charset': 'utf8'})
3846 3846
3847 3847 external_id = Column('external_id', Unicode(255), default=u'',
3848 3848 primary_key=True)
3849 3849 external_username = Column('external_username', Unicode(1024), default=u'')
3850 3850 local_user_id = Column('local_user_id', Integer(),
3851 3851 ForeignKey('users.user_id'), primary_key=True)
3852 3852 provider_name = Column('provider_name', Unicode(255), default=u'',
3853 3853 primary_key=True)
3854 3854 access_token = Column('access_token', String(1024), default=u'')
3855 3855 alt_token = Column('alt_token', String(1024), default=u'')
3856 3856 token_secret = Column('token_secret', String(1024), default=u'')
3857 3857
3858 3858 @classmethod
3859 3859 def by_external_id_and_provider(cls, external_id, provider_name,
3860 3860 local_user_id=None):
3861 3861 """
3862 3862 Returns ExternalIdentity instance based on search params
3863 3863
3864 3864 :param external_id:
3865 3865 :param provider_name:
3866 3866 :return: ExternalIdentity
3867 3867 """
3868 3868 query = cls.query()
3869 3869 query = query.filter(cls.external_id == external_id)
3870 3870 query = query.filter(cls.provider_name == provider_name)
3871 3871 if local_user_id:
3872 3872 query = query.filter(cls.local_user_id == local_user_id)
3873 3873 return query.first()
3874 3874
3875 3875 @classmethod
3876 3876 def user_by_external_id_and_provider(cls, external_id, provider_name):
3877 3877 """
3878 3878 Returns User instance based on search params
3879 3879
3880 3880 :param external_id:
3881 3881 :param provider_name:
3882 3882 :return: User
3883 3883 """
3884 3884 query = User.query()
3885 3885 query = query.filter(cls.external_id == external_id)
3886 3886 query = query.filter(cls.provider_name == provider_name)
3887 3887 query = query.filter(User.user_id == cls.local_user_id)
3888 3888 return query.first()
3889 3889
3890 3890 @classmethod
3891 3891 def by_local_user_id(cls, local_user_id):
3892 3892 """
3893 3893         Returns all external identities for the given local user
3894 3894
3895 3895 :param local_user_id:
3896 3896 :return: ExternalIdentity
3897 3897 """
3898 3898 query = cls.query()
3899 3899 query = query.filter(cls.local_user_id == local_user_id)
3900 3900 return query
3901 3901
3902 3902
3903 3903 class Integration(Base, BaseModel):
3904 3904 __tablename__ = 'integrations'
3905 3905 __table_args__ = (
3906 3906 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3907 3907 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3908 3908 )
3909 3909
3910 3910 integration_id = Column('integration_id', Integer(), primary_key=True)
3911 3911 integration_type = Column('integration_type', String(255))
3912 3912 enabled = Column('enabled', Boolean(), nullable=False)
3913 3913 name = Column('name', String(255), nullable=False)
3914 3914 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
3915 3915 default=False)
3916 3916
3917 3917 settings = Column(
3918 3918 'settings_json', MutationObj.as_mutable(
3919 3919 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3920 3920 repo_id = Column(
3921 3921 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
3922 3922 nullable=True, unique=None, default=None)
3923 3923 repo = relationship('Repository', lazy='joined')
3924 3924
3925 3925 repo_group_id = Column(
3926 3926 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
3927 3927 nullable=True, unique=None, default=None)
3928 3928 repo_group = relationship('RepoGroup', lazy='joined')
3929 3929
3930 3930 @property
3931 3931 def scope(self):
3932 3932 if self.repo:
3933 3933 return repr(self.repo)
3934 3934 if self.repo_group:
3935 3935 if self.child_repos_only:
3936 3936 return repr(self.repo_group) + ' (child repos only)'
3937 3937 else:
3938 3938 return repr(self.repo_group) + ' (recursive)'
3939 3939 if self.child_repos_only:
3940 3940 return 'root_repos'
3941 3941 return 'global'
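    # Editor's note: `scope` therefore resolves to the bound repository, a
    # repository group (recursive or child repos only), 'root_repos', or
    # 'global'.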
3942 3942
3943 3943 def __repr__(self):
3944 3944 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
3945 3945
3946 3946
3947 3947 class RepoReviewRuleUser(Base, BaseModel):
3948 3948 __tablename__ = 'repo_review_rules_users'
3949 3949 __table_args__ = (
3950 3950 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3951 3951 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3952 3952 )
3953 3953 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
3954 3954 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
3955 3955 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
3956 3956 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3957 3957 user = relationship('User')
3958 3958
3959 3959 def rule_data(self):
3960 3960 return {
3961 3961 'mandatory': self.mandatory
3962 3962 }
3963 3963
3964 3964
3965 3965 class RepoReviewRuleUserGroup(Base, BaseModel):
3966 3966 __tablename__ = 'repo_review_rules_users_groups'
3967 3967 __table_args__ = (
3968 3968 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3969 3969 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3970 3970 )
3971 3971 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
3972 3972 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
3973 3973     users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
3974 3974 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3975 3975 users_group = relationship('UserGroup')
3976 3976
3977 3977 def rule_data(self):
3978 3978 return {
3979 3979 'mandatory': self.mandatory
3980 3980 }
3981 3981
3982 3982
3983 3983 class RepoReviewRule(Base, BaseModel):
3984 3984 __tablename__ = 'repo_review_rules'
3985 3985 __table_args__ = (
3986 3986 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3987 3987 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3988 3988 )
3989 3989
3990 3990 repo_review_rule_id = Column(
3991 3991 'repo_review_rule_id', Integer(), primary_key=True)
3992 3992 repo_id = Column(
3993 3993 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
3994 3994 repo = relationship('Repository', backref='review_rules')
3995 3995
3996 3996 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
3997 3997 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
3998 3998
3999 3999 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4000 4000 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4001 4001 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4002 4002 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4003 4003
4004 4004 rule_users = relationship('RepoReviewRuleUser')
4005 4005 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4006 4006
4007 4007 @hybrid_property
4008 4008 def branch_pattern(self):
4009 4009 return self._branch_pattern or '*'
4010 4010
4011 4011 def _validate_glob(self, value):
4012 4012 re.compile('^' + glob2re(value) + '$')
4013 4013
4014 4014 @branch_pattern.setter
4015 4015 def branch_pattern(self, value):
4016 4016 self._validate_glob(value)
4017 4017 self._branch_pattern = value or '*'
4018 4018
4019 4019 @hybrid_property
4020 4020 def file_pattern(self):
4021 4021 return self._file_pattern or '*'
4022 4022
4023 4023 @file_pattern.setter
4024 4024 def file_pattern(self, value):
4025 4025 self._validate_glob(value)
4026 4026 self._file_pattern = value or '*'
4027 4027
4028 4028 def matches(self, branch, files_changed):
4029 4029 """
4030 4030 Check if this review rule matches a branch/files in a pull request
4031 4031
4032 4032 :param branch: branch name for the commit
4033 4033 :param files_changed: list of file paths changed in the pull request
4034 4034 """
4035 4035
4036 4036 branch = branch or ''
4037 4037 files_changed = files_changed or []
4038 4038
4039 4039 branch_matches = True
4040 4040 if branch:
4041 4041 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
4042 4042 branch_matches = bool(branch_regex.search(branch))
4043 4043
4044 4044 files_matches = True
4045 4045 if self.file_pattern != '*':
4046 4046 files_matches = False
4047 4047 file_regex = re.compile(glob2re(self.file_pattern))
4048 4048 for filename in files_changed:
4049 4049 if file_regex.search(filename):
4050 4050 files_matches = True
4051 4051 break
4052 4052
4053 4053 return branch_matches and files_matches
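    # Editor's note (example with hypothetical patterns; exact glob semantics
    # come from glob2re): with branch_pattern='release/*' and
    # file_pattern='*.py', a pull request on branch 'release/1.0' that only
    # touches 'docs/index.rst' does not match, while one touching 'setup.py'
    # does.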
4054 4054
4055 4055 @property
4056 4056 def review_users(self):
4057 4057 """ Returns the users which this rule applies to """
4058 4058
4059 4059 users = collections.OrderedDict()
4060 4060
4061 4061 for rule_user in self.rule_users:
4062 4062 if rule_user.user.active:
4063 4063 if rule_user.user not in users:
4064 4064 users[rule_user.user.username] = {
4065 4065 'user': rule_user.user,
4066 4066 'source': 'user',
4067 4067 'source_data': {},
4068 4068 'data': rule_user.rule_data()
4069 4069 }
4070 4070
4071 4071 for rule_user_group in self.rule_user_groups:
4072 4072 source_data = {
4073 4073 'name': rule_user_group.users_group.users_group_name,
4074 4074 'members': len(rule_user_group.users_group.members)
4075 4075 }
4076 4076 for member in rule_user_group.users_group.members:
4077 4077 if member.user.active:
4078 4078 users[member.user.username] = {
4079 4079 'user': member.user,
4080 4080 'source': 'user_group',
4081 4081 'source_data': source_data,
4082 4082 'data': rule_user_group.rule_data()
4083 4083 }
4084 4084
4085 4085 return users
4086 4086
4087 4087 def __repr__(self):
4088 4088 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4089 4089 self.repo_review_rule_id, self.repo)
4090 4090
4091 4091
4092 4092 class DbMigrateVersion(Base, BaseModel):
4093 4093 __tablename__ = 'db_migrate_version'
4094 4094 __table_args__ = (
4095 4095 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4096 4096 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4097 4097 )
4098 4098 repository_id = Column('repository_id', String(250), primary_key=True)
4099 4099 repository_path = Column('repository_path', Text)
4100 4100 version = Column('version', Integer)
4101 4101
4102 4102
4103 4103 class DbSession(Base, BaseModel):
4104 4104 __tablename__ = 'db_session'
4105 4105 __table_args__ = (
4106 4106 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4107 4107 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4108 4108 )
4109 4109
4110 4110 def __repr__(self):
4111 4111 return '<DB:DbSession({})>'.format(self.id)
4112 4112
4113 4113 id = Column('id', Integer())
4114 4114 namespace = Column('namespace', String(255), primary_key=True)
4115 4115 accessed = Column('accessed', DateTime, nullable=False)
4116 4116 created = Column('created', DateTime, nullable=False)
4117 4117 data = Column('data', PickleType, nullable=False)
@@ -1,907 +1,908 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 users model for RhodeCode
23 23 """
24 24
25 25 import logging
26 26 import traceback
27 27
28 28 import datetime
29 29 from pylons.i18n.translation import _
30 30
31 31 import ipaddress
32 32 from sqlalchemy.exc import DatabaseError
33 33
34 34 from rhodecode import events
35 35 from rhodecode.lib.user_log_filter import user_log_filter
36 36 from rhodecode.lib.utils2 import (
37 37 safe_unicode, get_current_rhodecode_user, action_logger_generic,
38 38 AttributeDict, str2bool)
39 39 from rhodecode.lib.exceptions import (
40 40 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
41 41 UserOwnsUserGroupsException, NotAllowedToCreateUserError)
42 42 from rhodecode.lib.caching_query import FromCache
43 43 from rhodecode.model import BaseModel
44 44 from rhodecode.model.auth_token import AuthTokenModel
45 45 from rhodecode.model.db import (
46 46 _hash_key, true, false, or_, joinedload, User, UserToPerm,
47 47 UserEmailMap, UserIpMap, UserLog)
48 48 from rhodecode.model.meta import Session
49 49 from rhodecode.model.repo_group import RepoGroupModel
50 50
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 class UserModel(BaseModel):
56 56 cls = User
57 57
58 58 def get(self, user_id, cache=False):
59 59 user = self.sa.query(User)
60 60 if cache:
61 61 user = user.options(
62 62 FromCache("sql_cache_short", "get_user_%s" % user_id))
63 63 return user.get(user_id)
64 64
65 65 def get_user(self, user):
66 66 return self._get_user(user)
67 67
68 68 def _serialize_user(self, user):
69 69 import rhodecode.lib.helpers as h
70 70
71 71 return {
72 72 'id': user.user_id,
73 73 'first_name': user.first_name,
74 74 'last_name': user.last_name,
75 75 'username': user.username,
76 76 'email': user.email,
77 77 'icon_link': h.gravatar_url(user.email, 30),
78 78 'value_display': h.escape(h.person(user)),
79 79 'value': user.username,
80 80 'value_type': 'user',
81 81 'active': user.active,
82 82 }
83 83
84 84 def get_users(self, name_contains=None, limit=20, only_active=True):
85 85
86 86 query = self.sa.query(User)
87 87 if only_active:
88 88 query = query.filter(User.active == true())
89 89
90 90 if name_contains:
91 91 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
92 92 query = query.filter(
93 93 or_(
94 94 User.name.ilike(ilike_expression),
95 95 User.lastname.ilike(ilike_expression),
96 96 User.username.ilike(ilike_expression)
97 97 )
98 98 )
99 99 query = query.limit(limit)
100 100 users = query.all()
101 101
102 102 _users = [
103 103 self._serialize_user(user) for user in users
104 104 ]
105 105 return _users
106 106
107 107 def get_by_username(self, username, cache=False, case_insensitive=False):
108 108
109 109 if case_insensitive:
110 110 user = self.sa.query(User).filter(User.username.ilike(username))
111 111 else:
112 112 user = self.sa.query(User)\
113 113 .filter(User.username == username)
114 114 if cache:
115 115 name_key = _hash_key(username)
116 116 user = user.options(
117 117 FromCache("sql_cache_short", "get_user_%s" % name_key))
118 118 return user.scalar()
119 119
120 120 def get_by_email(self, email, cache=False, case_insensitive=False):
121 121 return User.get_by_email(email, case_insensitive, cache)
122 122
123 123 def get_by_auth_token(self, auth_token, cache=False):
124 124 return User.get_by_auth_token(auth_token, cache)
125 125
126 126 def get_active_user_count(self, cache=False):
127 127 return User.query().filter(
128 128 User.active == True).filter(
129 129 User.username != User.DEFAULT_USER).count()
130 130
131 131 def create(self, form_data, cur_user=None):
132 132 if not cur_user:
133 133 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
134 134
135 135 user_data = {
136 136 'username': form_data['username'],
137 137 'password': form_data['password'],
138 138 'email': form_data['email'],
139 139 'firstname': form_data['firstname'],
140 140 'lastname': form_data['lastname'],
141 141 'active': form_data['active'],
142 142 'extern_type': form_data['extern_type'],
143 143 'extern_name': form_data['extern_name'],
144 144 'admin': False,
145 145 'cur_user': cur_user
146 146 }
147 147
148 148 if 'create_repo_group' in form_data:
149 149 user_data['create_repo_group'] = str2bool(
150 150 form_data.get('create_repo_group'))
151 151
152 152 try:
153 153 if form_data.get('password_change'):
154 154 user_data['force_password_change'] = True
155 155 return UserModel().create_or_update(**user_data)
156 156 except Exception:
157 157 log.error(traceback.format_exc())
158 158 raise
159 159
160 160 def update_user(self, user, skip_attrs=None, **kwargs):
161 161 from rhodecode.lib.auth import get_crypt_password
162 162
163 163 user = self._get_user(user)
164 164 if user.username == User.DEFAULT_USER:
165 165 raise DefaultUserException(
166 166 _("You can't Edit this user since it's"
167 167 " crucial for entire application"))
168 168
169 169 # first store only defaults
170 170 user_attrs = {
171 171 'updating_user_id': user.user_id,
172 172 'username': user.username,
173 173 'password': user.password,
174 174 'email': user.email,
175 175 'firstname': user.name,
176 176 'lastname': user.lastname,
177 177 'active': user.active,
178 178 'admin': user.admin,
179 179 'extern_name': user.extern_name,
180 180 'extern_type': user.extern_type,
181 181 'language': user.user_data.get('language')
182 182 }
183 183
184 184         # in case there's a new_password that comes from the form, use it
185 185         # to store as the password
186 186 if kwargs.get('new_password'):
187 187 kwargs['password'] = kwargs['new_password']
188 188
189 189 # cleanups, my_account password change form
190 190 kwargs.pop('current_password', None)
191 191 kwargs.pop('new_password', None)
192 192
193 193 # cleanups, user edit password change form
194 194 kwargs.pop('password_confirmation', None)
195 195 kwargs.pop('password_change', None)
196 196
197 197 # create repo group on user creation
198 198 kwargs.pop('create_repo_group', None)
199 199
200 200 # legacy forms send name, which is the firstname
201 201 firstname = kwargs.pop('name', None)
202 202 if firstname:
203 203 kwargs['firstname'] = firstname
204 204
205 205 for k, v in kwargs.items():
206 206 # skip if we don't want to update this
207 207 if skip_attrs and k in skip_attrs:
208 208 continue
209 209
210 210 user_attrs[k] = v
211 211
212 212 try:
213 213 return self.create_or_update(**user_attrs)
214 214 except Exception:
215 215 log.error(traceback.format_exc())
216 216 raise
217 217
218 218 def create_or_update(
219 219 self, username, password, email, firstname='', lastname='',
220 220 active=True, admin=False, extern_type=None, extern_name=None,
221 221 cur_user=None, plugin=None, force_password_change=False,
222 222 allow_to_create_user=True, create_repo_group=None,
223 223 updating_user_id=None, language=None, strict_creation_check=True):
224 224 """
225 225 Creates a new instance if not found, or updates current one
226 226
227 227 :param username:
228 228 :param password:
229 229 :param email:
230 230 :param firstname:
231 231 :param lastname:
232 232 :param active:
233 233 :param admin:
234 234 :param extern_type:
235 235 :param extern_name:
236 236 :param cur_user:
237 237 :param plugin: optional plugin this method was called from
238 238         :param force_password_change: marks the new or existing user as
239 239             being required to change the password
240 240 :param allow_to_create_user: Defines if the method can actually create
241 241 new users
242 242         :param create_repo_group: Defines if the method should also
243 243             create a repo group named after the user, owned by that user
244 244         :param updating_user_id: if set, this is the user we want to
245 245             update; this allows editing of the username.
246 246 :param language: language of user from interface.
247 247
248 248 :returns: new User object with injected `is_new_user` attribute.
249 249 """
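        # Editor's note (usage sketch, values are hypothetical):
        #   UserModel().create_or_update(
        #       username='jane', password='secret', email='jane@example.com',
        #       firstname='Jane', lastname='Doe')
        # creates the account if it does not exist, otherwise updates it, and
        # returns a User object carrying the injected `is_new_user` attribute.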
250 250 if not cur_user:
251 251 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
252 252
253 253 from rhodecode.lib.auth import (
254 254 get_crypt_password, check_password, generate_auth_token)
255 255 from rhodecode.lib.hooks_base import (
256 256 log_create_user, check_allowed_create_user)
257 257
258 258 def _password_change(new_user, password):
259 259 # empty password
260 260 if not new_user.password:
261 261 return False
262 262
263 263             # the password check is only needed for RhodeCode internal auth
264 264             # calls; if it comes from a plugin we don't care
265 265 if not plugin:
266 266
267 267                 # first check if we were given the crypted password back; if it
268 268                 # matches, it's not a password change
269 269 if new_user.password == password:
270 270 return False
271 271
272 272 password_match = check_password(password, new_user.password)
273 273 if not password_match:
274 274 return True
275 275
276 276 return False
277 277
278 278 # read settings on default personal repo group creation
279 279 if create_repo_group is None:
280 280 default_create_repo_group = RepoGroupModel()\
281 281 .get_default_create_personal_repo_group()
282 282 create_repo_group = default_create_repo_group
283 283
284 284 user_data = {
285 285 'username': username,
286 286 'password': password,
287 287 'email': email,
288 288 'firstname': firstname,
289 289 'lastname': lastname,
290 290 'active': active,
291 291 'admin': admin
292 292 }
293 293
294 294 if updating_user_id:
295 295 log.debug('Checking for existing account in RhodeCode '
296 296 'database with user_id `%s` ' % (updating_user_id,))
297 297 user = User.get(updating_user_id)
298 298 else:
299 299 log.debug('Checking for existing account in RhodeCode '
300 300 'database with username `%s` ' % (username,))
301 301 user = User.get_by_username(username, case_insensitive=True)
302 302
303 303 if user is None:
304 304 # we check internal flag if this method is actually allowed to
305 305 # create new user
306 306 if not allow_to_create_user:
307 307 msg = ('Method wants to create new user, but it is not '
308 308 'allowed to do so')
309 309 log.warning(msg)
310 310 raise NotAllowedToCreateUserError(msg)
311 311
312 312 log.debug('Creating new user %s', username)
313 313
314 314             # only if we create a user that is active
315 315 new_active_user = active
316 316 if new_active_user and strict_creation_check:
317 317                 # raises UserCreationError if it's not allowed for any reason to
318 318                 # create a new active user; this also executes pre-create hooks
319 319 check_allowed_create_user(user_data, cur_user, strict_check=True)
320 320 events.trigger(events.UserPreCreate(user_data))
321 321 new_user = User()
322 322 edit = False
323 323 else:
324 324 log.debug('updating user %s', username)
325 325 events.trigger(events.UserPreUpdate(user, user_data))
326 326 new_user = user
327 327 edit = True
328 328
329 329 # we're not allowed to edit default user
330 330 if user.username == User.DEFAULT_USER:
331 331 raise DefaultUserException(
332 332 _("You can't edit this user (`%(username)s`) since it's "
333 333 "crucial for entire application") % {'username': user.username})
334 334
335 335 # inject special attribute that will tell us if User is new or old
336 336 new_user.is_new_user = not edit
337 337         # for users that didn't specify an auth type, we use the RhodeCode built-in
338 338 from rhodecode.authentication.plugins import auth_rhodecode
339 339 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.name
340 340 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.name
341 341
342 342 try:
343 343 new_user.username = username
344 344 new_user.admin = admin
345 345 new_user.email = email
346 346 new_user.active = active
347 347 new_user.extern_name = safe_unicode(extern_name)
348 348 new_user.extern_type = safe_unicode(extern_type)
349 349 new_user.name = firstname
350 350 new_user.lastname = lastname
351 351
352 352             # set password only if creating a user or the password has changed
353 353 if not edit or _password_change(new_user, password):
354 354 reason = 'new password' if edit else 'new user'
355 355 log.debug('Updating password reason=>%s', reason)
356 356 new_user.password = get_crypt_password(password) if password else None
357 357
358 358 if force_password_change:
359 359 new_user.update_userdata(force_password_change=True)
360 360 if language:
361 361 new_user.update_userdata(language=language)
362 362 new_user.update_userdata(notification_status=True)
363 363
364 364 self.sa.add(new_user)
365 365
366 366 if not edit and create_repo_group:
367 367 RepoGroupModel().create_personal_repo_group(
368 368 new_user, commit_early=False)
369 369
370 370 if not edit:
371 371 # add the RSS token
372 372 AuthTokenModel().create(username,
373 373 description='Generated feed token',
374 374 role=AuthTokenModel.cls.ROLE_FEED)
375 375 log_create_user(created_by=cur_user, **new_user.get_dict())
376 376 events.trigger(events.UserPostCreate(user_data))
377 377 return new_user
378 378 except (DatabaseError,):
379 379 log.error(traceback.format_exc())
380 380 raise
381 381
382 382 def create_registration(self, form_data):
383 383 from rhodecode.model.notification import NotificationModel
384 384 from rhodecode.model.notification import EmailNotificationModel
385 385
386 386 try:
387 387 form_data['admin'] = False
388 388 form_data['extern_name'] = 'rhodecode'
389 389 form_data['extern_type'] = 'rhodecode'
390 390 new_user = self.create(form_data)
391 391
392 392 self.sa.add(new_user)
393 393 self.sa.flush()
394 394
395 395 user_data = new_user.get_dict()
396 396 kwargs = {
397 397                 # use a SQLAlchemy-safe dump of user data
398 398 'user': AttributeDict(user_data),
399 399 'date': datetime.datetime.now()
400 400 }
401 401 notification_type = EmailNotificationModel.TYPE_REGISTRATION
402 402 # pre-generate the subject for notification itself
403 403 (subject,
404 404 _h, _e, # we don't care about those
405 405 body_plaintext) = EmailNotificationModel().render_email(
406 406 notification_type, **kwargs)
407 407
408 408 # create notification objects, and emails
409 409 NotificationModel().create(
410 410 created_by=new_user,
411 411 notification_subject=subject,
412 412 notification_body=body_plaintext,
413 413 notification_type=notification_type,
414 414 recipients=None, # all admins
415 415 email_kwargs=kwargs,
416 416 )
417 417
418 418 return new_user
419 419 except Exception:
420 420 log.error(traceback.format_exc())
421 421 raise
422 422
423 423 def _handle_user_repos(self, username, repositories, handle_mode=None):
424 424 _superadmin = self.cls.get_first_super_admin()
425 425 left_overs = True
426 426
427 427 from rhodecode.model.repo import RepoModel
428 428
429 429 if handle_mode == 'detach':
430 430 for obj in repositories:
431 431 obj.user = _superadmin
432 432                 # set a description so we know why the super admin now owns
433 433                 # these additional repositories that were orphaned!
434 434 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
435 435 self.sa.add(obj)
436 436 left_overs = False
437 437 elif handle_mode == 'delete':
438 438 for obj in repositories:
439 439 RepoModel().delete(obj, forks='detach')
440 440 left_overs = False
441 441
442 442         # if nothing was done we still have leftovers
443 443 return left_overs
444 444
445 445 def _handle_user_repo_groups(self, username, repository_groups,
446 446 handle_mode=None):
447 447 _superadmin = self.cls.get_first_super_admin()
448 448 left_overs = True
449 449
450 450 from rhodecode.model.repo_group import RepoGroupModel
451 451
452 452 if handle_mode == 'detach':
453 453 for r in repository_groups:
454 454 r.user = _superadmin
455 455                 # set a description so we know why the super admin now owns
456 456                 # these additional repository groups that were orphaned!
457 457 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
458 458 self.sa.add(r)
459 459 left_overs = False
460 460 elif handle_mode == 'delete':
461 461 for r in repository_groups:
462 462 RepoGroupModel().delete(r)
463 463 left_overs = False
464 464
465 465         # if nothing was done we still have leftovers
466 466 return left_overs
467 467
468 468 def _handle_user_user_groups(self, username, user_groups, handle_mode=None):
469 469 _superadmin = self.cls.get_first_super_admin()
470 470 left_overs = True
471 471
472 472 from rhodecode.model.user_group import UserGroupModel
473 473
474 474 if handle_mode == 'detach':
475 475 for r in user_groups:
476 476 for user_user_group_to_perm in r.user_user_group_to_perm:
477 477 if user_user_group_to_perm.user.username == username:
478 478 user_user_group_to_perm.user = _superadmin
479 479 r.user = _superadmin
480 480                 # set a description so we know why the super admin now owns
481 481                 # these additional user groups that were orphaned!
482 482 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
483 483 self.sa.add(r)
484 484 left_overs = False
485 485 elif handle_mode == 'delete':
486 486 for r in user_groups:
487 487 UserGroupModel().delete(r)
488 488 left_overs = False
489 489
490 490         # if nothing was done we still have leftovers
491 491 return left_overs
492 492
493 493 def delete(self, user, cur_user=None, handle_repos=None,
494 494 handle_repo_groups=None, handle_user_groups=None):
495 495 if not cur_user:
496 496 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
497 497 user = self._get_user(user)
498 498
499 499 try:
500 500 if user.username == User.DEFAULT_USER:
501 501 raise DefaultUserException(
502 502 _(u"You can't remove this user since it's"
503 503 u" crucial for entire application"))
504 504
505 505 left_overs = self._handle_user_repos(
506 506 user.username, user.repositories, handle_repos)
507 507 if left_overs and user.repositories:
508 508 repos = [x.repo_name for x in user.repositories]
509 509 raise UserOwnsReposException(
510 510 _(u'user "%s" still owns %s repositories and cannot be '
511 511 u'removed. Switch owners or remove those repositories:%s')
512 512 % (user.username, len(repos), ', '.join(repos)))
513 513
514 514 left_overs = self._handle_user_repo_groups(
515 515 user.username, user.repository_groups, handle_repo_groups)
516 516 if left_overs and user.repository_groups:
517 517 repo_groups = [x.group_name for x in user.repository_groups]
518 518 raise UserOwnsRepoGroupsException(
519 519 _(u'user "%s" still owns %s repository groups and cannot be '
520 520 u'removed. Switch owners or remove those repository groups:%s')
521 521 % (user.username, len(repo_groups), ', '.join(repo_groups)))
522 522
523 523 left_overs = self._handle_user_user_groups(
524 524 user.username, user.user_groups, handle_user_groups)
525 525 if left_overs and user.user_groups:
526 526 user_groups = [x.users_group_name for x in user.user_groups]
527 527 raise UserOwnsUserGroupsException(
528 528 _(u'user "%s" still owns %s user groups and cannot be '
529 529 u'removed. Switch owners or remove those user groups:%s')
530 530 % (user.username, len(user_groups), ', '.join(user_groups)))
531 531
532 532 # we might change the user data with detach/delete, make sure
533 533 # the object is marked as expired before actually deleting !
534 534 self.sa.expire(user)
535 535 self.sa.delete(user)
536 536 from rhodecode.lib.hooks_base import log_delete_user
537 537 log_delete_user(deleted_by=cur_user, **user.get_dict())
538 538 except Exception:
539 539 log.error(traceback.format_exc())
540 540 raise
541 541
542 542 def reset_password_link(self, data, pwd_reset_url):
543 543 from rhodecode.lib.celerylib import tasks, run_task
544 544 from rhodecode.model.notification import EmailNotificationModel
545 545 user_email = data['email']
546 546 try:
547 547 user = User.get_by_email(user_email)
548 548 if user:
549 549 log.debug('password reset user found %s', user)
550 550
551 551 email_kwargs = {
552 552 'password_reset_url': pwd_reset_url,
553 553 'user': user,
554 554 'email': user_email,
555 555 'date': datetime.datetime.now()
556 556 }
557 557
558 558 (subject, headers, email_body,
559 559 email_body_plaintext) = EmailNotificationModel().render_email(
560 560 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
561 561
562 562 recipients = [user_email]
563 563
564 564 action_logger_generic(
565 565 'sending password reset email to user: {}'.format(
566 566 user), namespace='security.password_reset')
567 567
568 568 run_task(tasks.send_email, recipients, subject,
569 569 email_body_plaintext, email_body)
570 570
571 571 else:
572 572                 log.debug("password reset: user with email %s not found", user_email)
573 573 except Exception:
574 574 log.error(traceback.format_exc())
575 575 return False
576 576
577 577 return True
578 578
579 579 def reset_password(self, data):
580 580 from rhodecode.lib.celerylib import tasks, run_task
581 581 from rhodecode.model.notification import EmailNotificationModel
582 582 from rhodecode.lib import auth
583 583 user_email = data['email']
584 584 pre_db = True
585 585 try:
586 586 user = User.get_by_email(user_email)
587 587 new_passwd = auth.PasswordGenerator().gen_password(
588 588 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
589 589 if user:
590 590 user.password = auth.get_crypt_password(new_passwd)
591 591 # also force this user to reset his password !
592 592 user.update_userdata(force_password_change=True)
593 593
594 594 Session().add(user)
595 595
596 596 # now delete the token in question
597 597 UserApiKeys = AuthTokenModel.cls
598 598 UserApiKeys().query().filter(
599 599 UserApiKeys.api_key == data['token']).delete()
600 600
601 601 Session().commit()
602 602 log.info('successfully reset password for `%s`', user_email)
603 603
604 604 if new_passwd is None:
605 605 raise Exception('unable to generate new password')
606 606
607 607 pre_db = False
608 608
609 609 email_kwargs = {
610 610 'new_password': new_passwd,
611 611 'user': user,
612 612 'email': user_email,
613 613 'date': datetime.datetime.now()
614 614 }
615 615
616 616 (subject, headers, email_body,
617 617 email_body_plaintext) = EmailNotificationModel().render_email(
618 618 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
619 619 **email_kwargs)
620 620
621 621 recipients = [user_email]
622 622
623 623 action_logger_generic(
624 624 'sent new password to user: {} with email: {}'.format(
625 625 user, user_email), namespace='security.password_reset')
626 626
627 627 run_task(tasks.send_email, recipients, subject,
628 628 email_body_plaintext, email_body)
629 629
630 630 except Exception:
631 631 log.error('Failed to update user password')
632 632 log.error(traceback.format_exc())
633 633 if pre_db:
634 634 # we rollback only if local db stuff fails. If it goes into
635 635                 # run_task, we're past the rollback point and it wouldn't work then
636 636 Session().rollback()
637 637
638 638 return True
639 639
640 640 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
641 641 """
642 642         Fetches auth_user by user_id, or api_key if present.
643 643         Fills auth_user attributes with those taken from the database.
644 644         Additionally sets is_authenticated to False if the lookup
645 645         fails.
646 646
647 647 :param auth_user: instance of user to set attributes
648 648 :param user_id: user id to fetch by
649 649 :param api_key: api key to fetch by
650 650 :param username: username to fetch by
651 651 """
652 652 if user_id is None and api_key is None and username is None:
653 653 raise Exception('You need to pass user_id, api_key or username')
654 654
655 655 log.debug(
656 656 'doing fill data based on: user_id:%s api_key:%s username:%s',
657 657 user_id, api_key, username)
658 658 try:
659 659 dbuser = None
660 660 if user_id:
661 661 dbuser = self.get(user_id)
662 662 elif api_key:
663 663 dbuser = self.get_by_auth_token(api_key)
664 664 elif username:
665 665 dbuser = self.get_by_username(username)
666 666
667 667 if not dbuser:
668 668 log.warning(
669 669 'Unable to lookup user by id:%s api_key:%s username:%s',
670 670 user_id, api_key, username)
671 671 return False
672 672 if not dbuser.active:
673 673 log.debug('User `%s:%s` is inactive, skipping fill data',
674 674 username, user_id)
675 675 return False
676 676
677 677 log.debug('filling user:%s data', dbuser)
678 678
679 679 # TODO: johbo: Think about this and find a clean solution
680 680 user_data = dbuser.get_dict()
681 681 user_data.update(dbuser.get_api_data(include_secrets=True))
682 682 user_data.update({
683 683                 # explicitly set the safe, escaped values
684 684 'first_name': dbuser.first_name,
685 685 'last_name': dbuser.last_name,
686 686 })
687 687
688 688 for k, v in user_data.iteritems():
689 689                 # properties of auth user we don't update
690 690 if k not in ['auth_tokens', 'permissions']:
691 691 setattr(auth_user, k, v)
692 692
693 693 # few extras
694 694 setattr(auth_user, 'feed_token', dbuser.feed_token)
695 695 except Exception:
696 696 log.error(traceback.format_exc())
697 697 auth_user.is_authenticated = False
698 698 return False
699 699
700 700 return True
701 701
702 702 def has_perm(self, user, perm):
703 703 perm = self._get_perm(perm)
704 704 user = self._get_user(user)
705 705
706 706 return UserToPerm.query().filter(UserToPerm.user == user)\
707 707 .filter(UserToPerm.permission == perm).scalar() is not None
708 708
709 709 def grant_perm(self, user, perm):
710 710 """
711 711 Grant user global permissions
712 712
713 713 :param user:
714 714 :param perm:
715 715 """
716 716 user = self._get_user(user)
717 717 perm = self._get_perm(perm)
718 718 # if this permission is already granted skip it
719 719 _perm = UserToPerm.query()\
720 720 .filter(UserToPerm.user == user)\
721 721 .filter(UserToPerm.permission == perm)\
722 722 .scalar()
723 723 if _perm:
724 724 return
725 725 new = UserToPerm()
726 726 new.user = user
727 727 new.permission = perm
728 728 self.sa.add(new)
729 729 return new
730 730
731 731 def revoke_perm(self, user, perm):
732 732 """
733 733 Revoke users global permissions
734 734
735 735 :param user:
736 736 :param perm:
737 737 """
738 738 user = self._get_user(user)
739 739 perm = self._get_perm(perm)
740 740
741 741 obj = UserToPerm.query()\
742 742 .filter(UserToPerm.user == user)\
743 743 .filter(UserToPerm.permission == perm)\
744 744 .scalar()
745 745 if obj:
746 746 self.sa.delete(obj)
747 747
748 748 def add_extra_email(self, user, email):
749 749 """
750 750 Adds email address to UserEmailMap
751 751
752 752 :param user:
753 753 :param email:
754 754 """
755 755 from rhodecode.model import forms
756 756 form = forms.UserExtraEmailForm()()
757 757 data = form.to_python({'email': email})
758 758 user = self._get_user(user)
759 759
760 760 obj = UserEmailMap()
761 761 obj.user = user
762 762 obj.email = data['email']
763 763 self.sa.add(obj)
764 764 return obj
765 765
766 766 def delete_extra_email(self, user, email_id):
767 767 """
768 768 Removes email address from UserEmailMap
769 769
770 770 :param user:
771 771 :param email_id:
772 772 """
773 773 user = self._get_user(user)
774 774 obj = UserEmailMap.query().get(email_id)
775 775 if obj and obj.user_id == user.user_id:
776 776 self.sa.delete(obj)
777 777
778 778 def parse_ip_range(self, ip_range):
779 779 ip_list = []
780
780 781 def make_unique(value):
781 782 seen = []
782 783 return [c for c in value if not (c in seen or seen.append(c))]
783 784
784 785         # first split by commas
785 786 for ip_range in ip_range.split(','):
786 787 if not ip_range:
787 788 continue
788 789 ip_range = ip_range.strip()
789 790 if '-' in ip_range:
790 791 start_ip, end_ip = ip_range.split('-', 1)
791 start_ip = ipaddress.ip_address(start_ip.strip())
792 end_ip = ipaddress.ip_address(end_ip.strip())
792 start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip()))
793 end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip()))
793 794 parsed_ip_range = []
794 795
795 796 for index in xrange(int(start_ip), int(end_ip) + 1):
796 797 new_ip = ipaddress.ip_address(index)
797 798 parsed_ip_range.append(str(new_ip))
798 799 ip_list.extend(parsed_ip_range)
799 800 else:
800 801 ip_list.append(ip_range)
801 802
802 803 return make_unique(ip_list)
803 804
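The range-expansion logic above can be exercised on its own. Below is a minimal sketch of the same approach (comma splitting, dash ranges, order-preserving de-duplication) using plain ipaddress, independent of the model class and of safe_unicode; expand_ip_ranges is a hypothetical helper, not part of the model.

    import ipaddress

    def expand_ip_ranges(spec):
        # spec is a comma-separated list of single addresses or
        # dash-separated ranges, e.g. u'10.0.0.1-10.0.0.3,192.168.1.10'
        result = []
        for part in spec.split(u','):
            part = part.strip()
            if not part:
                continue
            if u'-' in part:
                start, end = (ipaddress.ip_address(p.strip())
                              for p in part.split(u'-', 1))
                result.extend(str(ipaddress.ip_address(i))
                              for i in range(int(start), int(end) + 1))
            else:
                result.append(part)
        # de-duplicate while keeping the original order
        seen = set()
        return [ip for ip in result if not (ip in seen or seen.add(ip))]

    # expand_ip_ranges(u'10.0.0.1-10.0.0.3,10.0.0.1')
    # -> ['10.0.0.1', '10.0.0.2', '10.0.0.3']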
804 805 def add_extra_ip(self, user, ip, description=None):
805 806 """
806 807 Adds ip address to UserIpMap
807 808
808 809 :param user:
809 810 :param ip:
810 811 """
811 812 from rhodecode.model import forms
812 813 form = forms.UserExtraIpForm()()
813 814 data = form.to_python({'ip': ip})
814 815 user = self._get_user(user)
815 816
816 817 obj = UserIpMap()
817 818 obj.user = user
818 819 obj.ip_addr = data['ip']
819 820 obj.description = description
820 821 self.sa.add(obj)
821 822 return obj
822 823
823 824 def delete_extra_ip(self, user, ip_id):
824 825 """
825 826 Removes ip address from UserIpMap
826 827
827 828 :param user:
828 829 :param ip_id:
829 830 """
830 831 user = self._get_user(user)
831 832 obj = UserIpMap.query().get(ip_id)
832 833 if obj and obj.user_id == user.user_id:
833 834 self.sa.delete(obj)
834 835
835 836 def get_accounts_in_creation_order(self, current_user=None):
836 837 """
837 838 Get accounts in order of creation for deactivation for license limits
838 839
839 840 pick currently logged in user, and append to the list in position 0
840 841 pick all super-admins in order of creation date and add it to the list
841 842 pick all other accounts in order of creation and add it to the list.
842 843
843 844         Based on that list, the trailing accounts can be disabled: they were
844 845         created last, while the current user and the super-admins sit at the
845 846         front of the list and are therefore affected last.
846 847
847 848 :param current_user: optionally current user running this operation
848 849 """
849 850
850 851 if not current_user:
851 852 current_user = get_current_rhodecode_user()
852 853 active_super_admins = [
853 854 x.user_id for x in User.query()
854 855 .filter(User.user_id != current_user.user_id)
855 856 .filter(User.active == true())
856 857 .filter(User.admin == true())
857 858 .order_by(User.created_on.asc())]
858 859
859 860 active_regular_users = [
860 861 x.user_id for x in User.query()
861 862 .filter(User.user_id != current_user.user_id)
862 863 .filter(User.active == true())
863 864 .filter(User.admin == false())
864 865 .order_by(User.created_on.asc())]
865 866
866 867 list_of_accounts = [current_user.user_id]
867 868 list_of_accounts += active_super_admins
868 869 list_of_accounts += active_regular_users
869 870
870 871 return list_of_accounts
871 872
872 873 def deactivate_last_users(self, expected_users):
873 874 """
874 875 Deactivate accounts that are over the license limits.
875 876         The algorithm for choosing which accounts to disable works as follows:
876 877
877 878 Get current user, then super admins in creation order, then regular
878 879 active users in creation order.
879 880
880 881         Using that list, we mark accounts from the end of it as inactive.
881 882         This way we block only the most recently created accounts.
882 883
883 884         :param expected_users: the expected number of active accounts; accounts
884 885             past that count, taken from the end of the ordered list, are deactivated
885 886 """
886 887
887 888 list_of_accounts = self.get_accounts_in_creation_order()
888 889
889 890 for acc_id in list_of_accounts[expected_users + 1:]:
890 891 user = User.get(acc_id)
891 892 log.info('Deactivating account %s for license unlock', user)
892 893 user.active = False
893 894 Session().add(user)
894 895 Session().commit()
895 896
896 897 return
897 898
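To make the slicing above concrete, here is a small, self-contained illustration in plain Python, with made-up account ids standing in for the ordered list returned by get_accounts_in_creation_order:

    # ordered as: current user first, then super-admins, then regular users,
    # each group sorted by creation date (oldest first)
    list_of_accounts = [2, 5, 7, 11, 13, 17, 19]

    expected_users = 4
    # everything past index expected_users + 1 gets deactivated,
    # i.e. only the most recently created regular accounts
    to_deactivate = list_of_accounts[expected_users + 1:]
    print(to_deactivate)   # [17, 19]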
898 899 def get_user_log(self, user, filter_term):
899 900 user_log = UserLog.query()\
900 901 .filter(or_(UserLog.user_id == user.user_id,
901 902 UserLog.username == user.username))\
902 903 .options(joinedload(UserLog.user))\
903 904 .options(joinedload(UserLog.repository))\
904 905 .order_by(UserLog.action_date.desc())
905 906
906 907 user_log = user_log_filter(user_log, filter_term)
907 908 return user_log
@@ -1,140 +1,140 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import re
23 23 import logging
24 24
25 25
26 26 import ipaddress
27 27 import colander
28 28
29 29 from rhodecode.translation import _
30 from rhodecode.lib.utils2 import glob2re
30 from rhodecode.lib.utils2 import glob2re, safe_unicode
31 31
32 32 log = logging.getLogger(__name__)
33 33
34 34
35 35 def ip_addr_validator(node, value):
36 36 try:
37 37         # this raises a ValueError if the address is not IPv4 or IPv6
38 ipaddress.ip_network(value, strict=False)
38 ipaddress.ip_network(safe_unicode(value), strict=False)
39 39 except ValueError:
40 40         msg = _(u'Please enter a valid IPv4 or IPv6 address')
41 41 raise colander.Invalid(node, msg)
42 42
43 43
44 44 class IpAddrValidator(object):
45 45 def __init__(self, strict=True):
46 46 self.strict = strict
47 47
48 48 def __call__(self, node, value):
49 49 try:
50 50             # this raises a ValueError if the address is not IPv4 or IPv6
51 ipaddress.ip_network(value, strict=self.strict)
51 ipaddress.ip_network(safe_unicode(value), strict=self.strict)
52 52 except ValueError:
53 53             msg = _(u'Please enter a valid IPv4 or IPv6 address')
54 54 raise colander.Invalid(node, msg)
55 55
56 56
57 57 def glob_validator(node, value):
58 58 try:
59 59 re.compile('^' + glob2re(value) + '$')
60 60 except Exception:
61 61 msg = _(u'Invalid glob pattern')
62 62 raise colander.Invalid(node, msg)
63 63
64 64
65 65 def valid_name_validator(node, value):
66 66 from rhodecode.model.validation_schema import types
67 67 if value is types.RootLocation:
68 68 return
69 69
70 70 msg = _('Name must start with a letter or number. Got `{}`').format(value)
71 71 if not re.match(r'^[a-zA-z0-9]{1,}', value):
72 72 raise colander.Invalid(node, msg)
73 73
74 74
75 75 class InvalidCloneUrl(Exception):
76 76 allowed_prefixes = ()
77 77
78 78
79 79 def url_validator(url, repo_type, config):
80 80 from rhodecode.lib.vcs.backends.hg import MercurialRepository
81 81 from rhodecode.lib.vcs.backends.git import GitRepository
82 82 from rhodecode.lib.vcs.backends.svn import SubversionRepository
83 83
84 84 if repo_type == 'hg':
85 85 allowed_prefixes = ('http', 'svn+http', 'git+http')
86 86
87 87 if 'http' in url[:4]:
88 88 # initially check if it's at least the proper URL
89 89 # or does it pass basic auth
90 90
91 91 MercurialRepository.check_url(url, config)
92 92 elif 'svn+http' in url[:8]: # svn->hg import
93 93 SubversionRepository.check_url(url, config)
94 94 elif 'git+http' in url[:8]: # git->hg import
95 95 raise NotImplementedError()
96 96 else:
97 97 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
98 98 'Allowed url must start with one of %s'
99 99 % (url, ','.join(allowed_prefixes)))
100 100 exc.allowed_prefixes = allowed_prefixes
101 101 raise exc
102 102
103 103 elif repo_type == 'git':
104 104 allowed_prefixes = ('http', 'svn+http', 'hg+http')
105 105 if 'http' in url[:4]:
106 106 # initially check if it's at least the proper URL
107 107 # or does it pass basic auth
108 108 GitRepository.check_url(url, config)
109 109 elif 'svn+http' in url[:8]: # svn->git import
110 110 raise NotImplementedError()
111 111 elif 'hg+http' in url[:8]: # hg->git import
112 112 raise NotImplementedError()
113 113 else:
114 114 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
115 115 'Allowed url must start with one of %s'
116 116 % (url, ','.join(allowed_prefixes)))
117 117 exc.allowed_prefixes = allowed_prefixes
118 118 raise exc
119 119
120 120
121 121 class CloneUriValidator(object):
122 122 def __init__(self, repo_type):
123 123 self.repo_type = repo_type
124 124
125 125 def __call__(self, node, value):
126 126 from rhodecode.lib.utils import make_db_config
127 127 try:
128 128 config = make_db_config(clear_session=False)
129 129 url_validator(value, self.repo_type, config)
130 130 except InvalidCloneUrl as e:
131 131 log.warning(e)
132 132 msg = _(u'Invalid clone url, provide a valid clone '
133 133 u'url starting with one of {allowed_prefixes}').format(
134 134 allowed_prefixes=e.allowed_prefixes)
135 135 raise colander.Invalid(node, msg)
136 136 except Exception:
137 137 log.exception('Url validation failed')
138 138 msg = _(u'invalid clone url for {repo_type} repository').format(
139 139 repo_type=self.repo_type)
140 140 raise colander.Invalid(node, msg)
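For context, a validator like CloneUriValidator is typically attached to a colander schema node. A hedged sketch follows; the schema and node names are made up for illustration, and actually running the validation still needs a working database config behind make_db_config():

    import colander

    class RepoSchema(colander.MappingSchema):
        # hypothetical schema node; CloneUriValidator('git') is the class
        # defined above
        clone_uri = colander.SchemaNode(
            colander.String(),
            missing='',
            validator=CloneUriValidator('git'))

    # RepoSchema().deserialize({'clone_uri': 'http://example.com/repo.git'})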
@@ -1,1122 +1,1122 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Set of generic validators
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28 from collections import defaultdict
29 29
30 30 import formencode
31 31 import ipaddress
32 32 from formencode.validators import (
33 33 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set,
34 34 NotEmpty, IPAddress, CIDR, String, FancyValidator
35 35 )
36 36 from pylons.i18n.translation import _
37 37 from sqlalchemy.sql.expression import true
38 38 from sqlalchemy.util import OrderedSet
39 39 from webhelpers.pylonslib.secure_form import authentication_token
40 40
41 41 from rhodecode.authentication import (
42 42 legacy_plugin_prefix, _import_legacy_plugin)
43 43 from rhodecode.authentication.base import loadplugin
44 44 from rhodecode.config.routing import ADMIN_PREFIX
45 45 from rhodecode.lib.auth import HasRepoGroupPermissionAny, HasPermissionAny
46 46 from rhodecode.lib.utils import repo_name_slug, make_db_config
47 from rhodecode.lib.utils2 import safe_int, str2bool, aslist, md5
47 from rhodecode.lib.utils2 import safe_int, str2bool, aslist, md5, safe_unicode
48 48 from rhodecode.lib.vcs.backends.git.repository import GitRepository
49 49 from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository
50 50 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
51 51 from rhodecode.model.db import (
52 52 RepoGroup, Repository, UserGroup, User, ChangesetStatus, Gist)
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54
55 55 # silence warnings and pylint
56 56 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, \
57 57 NotEmpty, IPAddress, CIDR, String, FancyValidator
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61
62 62 class _Missing(object):
63 63 pass
64 64
65 65 Missing = _Missing()
66 66
67 67
68 68 class StateObj(object):
69 69 """
70 70 this is needed to translate the messages using _() in validators
71 71 """
72 72 _ = staticmethod(_)
73 73
74 74
75 75 def M(self, key, state=None, **kwargs):
76 76 """
77 77 returns string from self.message based on given key,
78 78 passed kw params are used to substitute %(named)s params inside
79 79 translated strings
80 80
81 81 :param msg:
82 82 :param state:
83 83 """
84 84 if state is None:
85 85 state = StateObj()
86 86 else:
87 87 state._ = staticmethod(_)
88 88 # inject validator into state object
89 89 return self.message(key, state, **kwargs)
90 90
91 91
92 92 def UniqueList(convert=None):
93 93 class _UniqueList(formencode.FancyValidator):
94 94 """
95 95 Unique List !
96 96 """
97 97 messages = {
98 98 'empty': _(u'Value cannot be an empty list'),
99 99 'missing_value': _(u'Value cannot be an empty list'),
100 100 }
101 101
102 102 def _to_python(self, value, state):
103 103 ret_val = []
104 104
105 105 def make_unique(value):
106 106 seen = []
107 107 return [c for c in value if not (c in seen or seen.append(c))]
108 108
109 109 if isinstance(value, list):
110 110 ret_val = make_unique(value)
111 111 elif isinstance(value, set):
112 112 ret_val = make_unique(list(value))
113 113 elif isinstance(value, tuple):
114 114 ret_val = make_unique(list(value))
115 115 elif value is None:
116 116 ret_val = []
117 117 else:
118 118 ret_val = [value]
119 119
120 120 if convert:
121 121 ret_val = map(convert, ret_val)
122 122 return ret_val
123 123
124 124 def empty_value(self, value):
125 125 return []
126 126
127 127 return _UniqueList
128 128
129 129
130 130 def UniqueListFromString():
131 131 class _UniqueListFromString(UniqueList()):
132 132 def _to_python(self, value, state):
133 133 if isinstance(value, basestring):
134 134 value = aslist(value, ',')
135 135 return super(_UniqueListFromString, self)._to_python(value, state)
136 136 return _UniqueListFromString
137 137
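As a rough usage sketch of the two helpers above (assuming the formencode version in use still dispatches to _to_python, and the module's aslist import):

    # build the validator class and run a comma-separated string through it
    validator = UniqueListFromString()()
    validator.to_python(u'docs,src,docs,tests')
    # -> [u'docs', u'src', u'tests'] - duplicates removed, order preserved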
138 138
139 139 def ValidSvnPattern(section, repo_name=None):
140 140 class _validator(formencode.validators.FancyValidator):
141 141 messages = {
142 142 'pattern_exists': _(u'Pattern already exists'),
143 143 }
144 144
145 145 def validate_python(self, value, state):
146 146 if not value:
147 147 return
148 148 model = VcsSettingsModel(repo=repo_name)
149 149 ui_settings = model.get_svn_patterns(section=section)
150 150 for entry in ui_settings:
151 151 if value == entry.value:
152 152 msg = M(self, 'pattern_exists', state)
153 153 raise formencode.Invalid(msg, value, state)
154 154 return _validator
155 155
156 156
157 157 def ValidUsername(edit=False, old_data={}):
158 158 class _validator(formencode.validators.FancyValidator):
159 159 messages = {
160 160 'username_exists': _(u'Username "%(username)s" already exists'),
161 161 'system_invalid_username':
162 162 _(u'Username "%(username)s" is forbidden'),
163 163 'invalid_username':
164 164                 _(u'Username may only contain alphanumeric characters, '
165 165                   u'underscores, periods or dashes and must begin with an '
166 166                   u'alphanumeric character or underscore')
167 167 }
168 168
169 169 def validate_python(self, value, state):
170 170 if value in ['default', 'new_user']:
171 171 msg = M(self, 'system_invalid_username', state, username=value)
172 172 raise formencode.Invalid(msg, value, state)
173 173 # check if user is unique
174 174 old_un = None
175 175 if edit:
176 176 old_un = User.get(old_data.get('user_id')).username
177 177
178 178 if old_un != value or not edit:
179 179 if User.get_by_username(value, case_insensitive=True):
180 180 msg = M(self, 'username_exists', state, username=value)
181 181 raise formencode.Invalid(msg, value, state)
182 182
183 183 if (re.match(r'^[\w]{1}[\w\-\.]{0,254}$', value)
184 184 is None):
185 185 msg = M(self, 'invalid_username', state)
186 186 raise formencode.Invalid(msg, value, state)
187 187 return _validator
188 188
189 189
190 190 def ValidRegex(msg=None):
191 191 class _validator(formencode.validators.Regex):
192 192 messages = {'invalid': msg or _(u'The input is not valid')}
193 193 return _validator
194 194
195 195
196 196 def ValidRepoUser(allow_disabled=False):
197 197 class _validator(formencode.validators.FancyValidator):
198 198 messages = {
199 199 'invalid_username': _(u'Username %(username)s is not valid'),
200 200 'disabled_username': _(u'Username %(username)s is disabled')
201 201 }
202 202
203 203 def validate_python(self, value, state):
204 204 try:
205 205 user = User.query().filter(User.username == value).one()
206 206 except Exception:
207 207 msg = M(self, 'invalid_username', state, username=value)
208 208 raise formencode.Invalid(
209 209 msg, value, state, error_dict={'username': msg}
210 210 )
211 211 if user and (not allow_disabled and not user.active):
212 212 msg = M(self, 'disabled_username', state, username=value)
213 213 raise formencode.Invalid(
214 214 msg, value, state, error_dict={'username': msg}
215 215 )
216 216
217 217 return _validator
218 218
219 219
220 220 def ValidUserGroup(edit=False, old_data={}):
221 221 class _validator(formencode.validators.FancyValidator):
222 222 messages = {
223 223 'invalid_group': _(u'Invalid user group name'),
224 224 'group_exist': _(u'User group "%(usergroup)s" already exists'),
225 225 'invalid_usergroup_name':
226 226                 _(u'user group name may only contain alphanumeric '
227 227                   u'characters, underscores, periods or dashes and must begin '
228 228                   u'with an alphanumeric character')
229 229 }
230 230
231 231 def validate_python(self, value, state):
232 232 if value in ['default']:
233 233 msg = M(self, 'invalid_group', state)
234 234 raise formencode.Invalid(
235 235 msg, value, state, error_dict={'users_group_name': msg}
236 236 )
237 237 # check if group is unique
238 238 old_ugname = None
239 239 if edit:
240 240 old_id = old_data.get('users_group_id')
241 241 old_ugname = UserGroup.get(old_id).users_group_name
242 242
243 243 if old_ugname != value or not edit:
244 244 is_existing_group = UserGroup.get_by_group_name(
245 245 value, case_insensitive=True)
246 246 if is_existing_group:
247 247 msg = M(self, 'group_exist', state, usergroup=value)
248 248 raise formencode.Invalid(
249 249 msg, value, state, error_dict={'users_group_name': msg}
250 250 )
251 251
252 252 if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None:
253 253 msg = M(self, 'invalid_usergroup_name', state)
254 254 raise formencode.Invalid(
255 255 msg, value, state, error_dict={'users_group_name': msg}
256 256 )
257 257
258 258 return _validator
259 259
260 260
261 261 def ValidRepoGroup(edit=False, old_data={}, can_create_in_root=False):
262 262 class _validator(formencode.validators.FancyValidator):
263 263 messages = {
264 264 'group_parent_id': _(u'Cannot assign this group as parent'),
265 265 'group_exists': _(u'Group "%(group_name)s" already exists'),
266 266 'repo_exists': _(u'Repository with name "%(group_name)s" '
267 267 u'already exists'),
268 268             'permission_denied': _(u"no permission to store repository group "
269 269                                    u"in this location"),
270 270 'permission_denied_root': _(
271 271 u"no permission to store repository group "
272 272 u"in root location")
273 273 }
274 274
275 275 def _to_python(self, value, state):
276 276 group_name = repo_name_slug(value.get('group_name', ''))
277 277 group_parent_id = safe_int(value.get('group_parent_id'))
278 278 gr = RepoGroup.get(group_parent_id)
279 279 if gr:
280 280 parent_group_path = gr.full_path
281 281 # value needs to be aware of group name in order to check
282 282             # the db key. This is actually just the name to store in the
283 283 # database
284 284 group_name_full = (
285 285 parent_group_path + RepoGroup.url_sep() + group_name)
286 286 else:
287 287 group_name_full = group_name
288 288
289 289 value['group_name'] = group_name
290 290 value['group_name_full'] = group_name_full
291 291 value['group_parent_id'] = group_parent_id
292 292 return value
293 293
294 294 def validate_python(self, value, state):
295 295
296 296 old_group_name = None
297 297 group_name = value.get('group_name')
298 298 group_name_full = value.get('group_name_full')
299 299 group_parent_id = safe_int(value.get('group_parent_id'))
300 300 if group_parent_id == -1:
301 301 group_parent_id = None
302 302
303 303 group_obj = RepoGroup.get(old_data.get('group_id'))
304 304 parent_group_changed = False
305 305 if edit:
306 306 old_group_name = group_obj.group_name
307 307 old_group_parent_id = group_obj.group_parent_id
308 308
309 309 if group_parent_id != old_group_parent_id:
310 310 parent_group_changed = True
311 311
312 312 # TODO: mikhail: the following if statement is not reached
313 313 # since group_parent_id's OneOf validation fails before.
314 314 # Can be removed.
315 315
316 316 # check against setting a parent of self
317 317 parent_of_self = (
318 318 old_data['group_id'] == group_parent_id
319 319 if group_parent_id else False
320 320 )
321 321 if parent_of_self:
322 322 msg = M(self, 'group_parent_id', state)
323 323 raise formencode.Invalid(
324 324 msg, value, state, error_dict={'group_parent_id': msg}
325 325 )
326 326
327 327 # group we're moving current group inside
328 328 child_group = None
329 329 if group_parent_id:
330 330 child_group = RepoGroup.query().filter(
331 331 RepoGroup.group_id == group_parent_id).scalar()
332 332
333 333 # do a special check that we cannot move a group to one of
334 334             # its children
335 335 if edit and child_group:
336 336 parents = [x.group_id for x in child_group.parents]
337 337 move_to_children = old_data['group_id'] in parents
338 338 if move_to_children:
339 339 msg = M(self, 'group_parent_id', state)
340 340 raise formencode.Invalid(
341 341 msg, value, state, error_dict={'group_parent_id': msg})
342 342
343 343 # Check if we have permission to store in the parent.
344 344 # Only check if the parent group changed.
345 345 if parent_group_changed:
346 346 if child_group is None:
347 347 if not can_create_in_root:
348 348 msg = M(self, 'permission_denied_root', state)
349 349 raise formencode.Invalid(
350 350 msg, value, state,
351 351 error_dict={'group_parent_id': msg})
352 352 else:
353 353 valid = HasRepoGroupPermissionAny('group.admin')
354 354 forbidden = not valid(
355 355 child_group.group_name, 'can create group validator')
356 356 if forbidden:
357 357 msg = M(self, 'permission_denied', state)
358 358 raise formencode.Invalid(
359 359 msg, value, state,
360 360 error_dict={'group_parent_id': msg})
361 361
362 362 # if we change the name or it's new group, check for existing names
363 363 # or repositories with the same name
364 364 if old_group_name != group_name_full or not edit:
365 365 # check group
366 366 gr = RepoGroup.get_by_group_name(group_name_full)
367 367 if gr:
368 368 msg = M(self, 'group_exists', state, group_name=group_name)
369 369 raise formencode.Invalid(
370 370 msg, value, state, error_dict={'group_name': msg})
371 371
372 372 # check for same repo
373 373 repo = Repository.get_by_repo_name(group_name_full)
374 374 if repo:
375 375 msg = M(self, 'repo_exists', state, group_name=group_name)
376 376 raise formencode.Invalid(
377 377 msg, value, state, error_dict={'group_name': msg})
378 378
379 379 return _validator
380 380
381 381
382 382 def ValidPassword():
383 383 class _validator(formencode.validators.FancyValidator):
384 384 messages = {
385 385 'invalid_password':
386 386 _(u'Invalid characters (non-ascii) in password')
387 387 }
388 388
389 389 def validate_python(self, value, state):
390 390 try:
391 391 (value or '').decode('ascii')
392 392 except UnicodeError:
393 393 msg = M(self, 'invalid_password', state)
394 394 raise formencode.Invalid(msg, value, state,)
395 395 return _validator
396 396
397 397
398 398 def ValidOldPassword(username):
399 399 class _validator(formencode.validators.FancyValidator):
400 400 messages = {
401 401 'invalid_password': _(u'Invalid old password')
402 402 }
403 403
404 404 def validate_python(self, value, state):
405 405 from rhodecode.authentication.base import authenticate, HTTP_TYPE
406 406 if not authenticate(username, value, '', HTTP_TYPE):
407 407 msg = M(self, 'invalid_password', state)
408 408 raise formencode.Invalid(
409 409 msg, value, state, error_dict={'current_password': msg}
410 410 )
411 411 return _validator
412 412
413 413
414 414 def ValidPasswordsMatch(
415 415 passwd='new_password', passwd_confirmation='password_confirmation'):
416 416 class _validator(formencode.validators.FancyValidator):
417 417 messages = {
418 418 'password_mismatch': _(u'Passwords do not match'),
419 419 }
420 420
421 421 def validate_python(self, value, state):
422 422
423 423 pass_val = value.get('password') or value.get(passwd)
424 424 if pass_val != value[passwd_confirmation]:
425 425 msg = M(self, 'password_mismatch', state)
426 426 raise formencode.Invalid(
427 427 msg, value, state,
428 428 error_dict={passwd: msg, passwd_confirmation: msg}
429 429 )
430 430 return _validator
431 431
432 432
433 433 def ValidAuth():
434 434 class _validator(formencode.validators.FancyValidator):
435 435 messages = {
436 436 'invalid_password': _(u'invalid password'),
437 437 'invalid_username': _(u'invalid user name'),
438 438 'disabled_account': _(u'Your account is disabled')
439 439 }
440 440
441 441 def validate_python(self, value, state):
442 442 from rhodecode.authentication.base import authenticate, HTTP_TYPE
443 443
444 444 password = value['password']
445 445 username = value['username']
446 446
447 447 if not authenticate(username, password, '', HTTP_TYPE,
448 448 skip_missing=True):
449 449 user = User.get_by_username(username)
450 450 if user and not user.active:
451 451 log.warning('user %s is disabled', username)
452 452 msg = M(self, 'disabled_account', state)
453 453 raise formencode.Invalid(
454 454 msg, value, state, error_dict={'username': msg}
455 455 )
456 456 else:
457 457 log.warning('user `%s` failed to authenticate', username)
458 458 msg = M(self, 'invalid_username', state)
459 459 msg2 = M(self, 'invalid_password', state)
460 460 raise formencode.Invalid(
461 461 msg, value, state,
462 462 error_dict={'username': msg, 'password': msg2}
463 463 )
464 464 return _validator
465 465
466 466
467 467 def ValidAuthToken():
468 468 class _validator(formencode.validators.FancyValidator):
469 469 messages = {
470 470 'invalid_token': _(u'Token mismatch')
471 471 }
472 472
473 473 def validate_python(self, value, state):
474 474 if value != authentication_token():
475 475 msg = M(self, 'invalid_token', state)
476 476 raise formencode.Invalid(msg, value, state)
477 477 return _validator
478 478
479 479
480 480 def ValidRepoName(edit=False, old_data={}):
481 481 class _validator(formencode.validators.FancyValidator):
482 482 messages = {
483 483 'invalid_repo_name':
484 484 _(u'Repository name %(repo)s is disallowed'),
485 485 # top level
486 486 'repository_exists': _(u'Repository with name %(repo)s '
487 487 u'already exists'),
488 488 'group_exists': _(u'Repository group with name "%(repo)s" '
489 489 u'already exists'),
490 490 # inside a group
491 491 'repository_in_group_exists': _(u'Repository with name %(repo)s '
492 492 u'exists in group "%(group)s"'),
493 493 'group_in_group_exists': _(
494 494 u'Repository group with name "%(repo)s" '
495 495 u'exists in group "%(group)s"'),
496 496 }
497 497
498 498 def _to_python(self, value, state):
499 499 repo_name = repo_name_slug(value.get('repo_name', ''))
500 500 repo_group = value.get('repo_group')
501 501 if repo_group:
502 502 gr = RepoGroup.get(repo_group)
503 503 group_path = gr.full_path
504 504 group_name = gr.group_name
505 505 # value needs to be aware of group name in order to check
506 506                 # the db key. This is actually just the name to store in the
507 507 # database
508 508 repo_name_full = group_path + RepoGroup.url_sep() + repo_name
509 509 else:
510 510 group_name = group_path = ''
511 511 repo_name_full = repo_name
512 512
513 513 value['repo_name'] = repo_name
514 514 value['repo_name_full'] = repo_name_full
515 515 value['group_path'] = group_path
516 516 value['group_name'] = group_name
517 517 return value
518 518
519 519 def validate_python(self, value, state):
520 520
521 521 repo_name = value.get('repo_name')
522 522 repo_name_full = value.get('repo_name_full')
523 523 group_path = value.get('group_path')
524 524 group_name = value.get('group_name')
525 525
526 526 if repo_name in [ADMIN_PREFIX, '']:
527 527 msg = M(self, 'invalid_repo_name', state, repo=repo_name)
528 528 raise formencode.Invalid(
529 529 msg, value, state, error_dict={'repo_name': msg})
530 530
531 531 rename = old_data.get('repo_name') != repo_name_full
532 532 create = not edit
533 533 if rename or create:
534 534
535 535 if group_path:
536 536 if Repository.get_by_repo_name(repo_name_full):
537 537 msg = M(self, 'repository_in_group_exists', state,
538 538 repo=repo_name, group=group_name)
539 539 raise formencode.Invalid(
540 540 msg, value, state, error_dict={'repo_name': msg})
541 541 if RepoGroup.get_by_group_name(repo_name_full):
542 542 msg = M(self, 'group_in_group_exists', state,
543 543 repo=repo_name, group=group_name)
544 544 raise formencode.Invalid(
545 545 msg, value, state, error_dict={'repo_name': msg})
546 546 else:
547 547 if RepoGroup.get_by_group_name(repo_name_full):
548 548 msg = M(self, 'group_exists', state, repo=repo_name)
549 549 raise formencode.Invalid(
550 550 msg, value, state, error_dict={'repo_name': msg})
551 551
552 552 if Repository.get_by_repo_name(repo_name_full):
553 553 msg = M(
554 554 self, 'repository_exists', state, repo=repo_name)
555 555 raise formencode.Invalid(
556 556 msg, value, state, error_dict={'repo_name': msg})
557 557 return value
558 558 return _validator
559 559
560 560
561 561 def ValidForkName(*args, **kwargs):
562 562 return ValidRepoName(*args, **kwargs)
563 563
564 564
565 565 def SlugifyName():
566 566 class _validator(formencode.validators.FancyValidator):
567 567
568 568 def _to_python(self, value, state):
569 569 return repo_name_slug(value)
570 570
571 571 def validate_python(self, value, state):
572 572 pass
573 573
574 574 return _validator
575 575
576 576
577 577 def CannotHaveGitSuffix():
578 578 class _validator(formencode.validators.FancyValidator):
579 579 messages = {
580 580 'has_git_suffix':
581 581 _(u'Repository name cannot end with .git'),
582 582 }
583 583
584 584 def _to_python(self, value, state):
585 585 return value
586 586
587 587 def validate_python(self, value, state):
588 588 if value and value.endswith('.git'):
589 589 msg = M(
590 590 self, 'has_git_suffix', state)
591 591 raise formencode.Invalid(
592 592 msg, value, state, error_dict={'repo_name': msg})
593 593
594 594 return _validator
595 595
596 596
597 597 def ValidCloneUri():
598 598 class InvalidCloneUrl(Exception):
599 599 allowed_prefixes = ()
600 600
601 601 def url_handler(repo_type, url):
602 602 config = make_db_config(clear_session=False)
603 603 if repo_type == 'hg':
604 604 allowed_prefixes = ('http', 'svn+http', 'git+http')
605 605
606 606 if 'http' in url[:4]:
607 607 # initially check if it's at least the proper URL
608 608 # or does it pass basic auth
609 609 MercurialRepository.check_url(url, config)
610 610 elif 'svn+http' in url[:8]: # svn->hg import
611 611 SubversionRepository.check_url(url, config)
612 612 elif 'git+http' in url[:8]: # git->hg import
613 613 raise NotImplementedError()
614 614 else:
615 615 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
616 616 'Allowed url must start with one of %s'
617 617 % (url, ','.join(allowed_prefixes)))
618 618 exc.allowed_prefixes = allowed_prefixes
619 619 raise exc
620 620
621 621 elif repo_type == 'git':
622 622 allowed_prefixes = ('http', 'svn+http', 'hg+http')
623 623 if 'http' in url[:4]:
624 624 # initially check if it's at least the proper URL
625 625 # or does it pass basic auth
626 626 GitRepository.check_url(url, config)
627 627 elif 'svn+http' in url[:8]: # svn->git import
628 628 raise NotImplementedError()
629 629 elif 'hg+http' in url[:8]: # hg->git import
630 630 raise NotImplementedError()
631 631 else:
632 632 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
633 633 'Allowed url must start with one of %s'
634 634 % (url, ','.join(allowed_prefixes)))
635 635 exc.allowed_prefixes = allowed_prefixes
636 636 raise exc
637 637
638 638 class _validator(formencode.validators.FancyValidator):
639 639 messages = {
640 640 'clone_uri': _(u'invalid clone url for %(rtype)s repository'),
641 641 'invalid_clone_uri': _(
642 642 u'Invalid clone url, provide a valid clone '
643 643 u'url starting with one of %(allowed_prefixes)s')
644 644 }
645 645
646 646 def validate_python(self, value, state):
647 647 repo_type = value.get('repo_type')
648 648 url = value.get('clone_uri')
649 649
650 650 if url:
651 651 try:
652 652 url_handler(repo_type, url)
653 653 except InvalidCloneUrl as e:
654 654 log.warning(e)
655 655 msg = M(self, 'invalid_clone_uri', rtype=repo_type,
656 656 allowed_prefixes=','.join(e.allowed_prefixes))
657 657 raise formencode.Invalid(msg, value, state,
658 658 error_dict={'clone_uri': msg})
659 659 except Exception:
660 660 log.exception('Url validation failed')
661 661 msg = M(self, 'clone_uri', rtype=repo_type)
662 662 raise formencode.Invalid(msg, value, state,
663 663 error_dict={'clone_uri': msg})
664 664 return _validator
665 665
666 666
667 667 def ValidForkType(old_data={}):
668 668 class _validator(formencode.validators.FancyValidator):
669 669 messages = {
670 670             'invalid_fork_type': _(u'Fork has to be the same type as parent')
671 671 }
672 672
673 673 def validate_python(self, value, state):
674 674 if old_data['repo_type'] != value:
675 675 msg = M(self, 'invalid_fork_type', state)
676 676 raise formencode.Invalid(
677 677 msg, value, state, error_dict={'repo_type': msg}
678 678 )
679 679 return _validator
680 680
681 681
682 682 def CanWriteGroup(old_data=None):
683 683 class _validator(formencode.validators.FancyValidator):
684 684 messages = {
685 685 'permission_denied': _(
686 686 u"You do not have the permission "
687 687 u"to create repositories in this group."),
688 688 'permission_denied_root': _(
689 689 u"You do not have the permission to store repositories in "
690 690 u"the root location.")
691 691 }
692 692
693 693 def _to_python(self, value, state):
694 694 # root location
695 695 if value in [-1, "-1"]:
696 696 return None
697 697 return value
698 698
699 699 def validate_python(self, value, state):
700 700 gr = RepoGroup.get(value)
701 701 gr_name = gr.group_name if gr else None # None means ROOT location
702 702             # check if 'create repositories with write permission on group' is enabled
703 703 create_on_write = HasPermissionAny(
704 704 'hg.create.write_on_repogroup.true')()
705 705 group_admin = HasRepoGroupPermissionAny('group.admin')(
706 706 gr_name, 'can write into group validator')
707 707 group_write = HasRepoGroupPermissionAny('group.write')(
708 708 gr_name, 'can write into group validator')
709 709 forbidden = not (group_admin or (group_write and create_on_write))
710 710 can_create_repos = HasPermissionAny(
711 711 'hg.admin', 'hg.create.repository')
712 712 gid = (old_data['repo_group'].get('group_id')
713 713 if (old_data and 'repo_group' in old_data) else None)
714 714 value_changed = gid != safe_int(value)
715 715 new = not old_data
716 716             # only run the check if the value changed or the entry is new;
717 717             # there's a case where someone had write permission revoked after
718 718             # creating the repository, and we don't need to re-check permission
719 719             # if they didn't change the group selected in the form box
720 720 if value_changed or new:
721 721 # parent group need to be existing
722 722 if gr and forbidden:
723 723 msg = M(self, 'permission_denied', state)
724 724 raise formencode.Invalid(
725 725 msg, value, state, error_dict={'repo_type': msg}
726 726 )
727 727 # check if we can write to root location !
728 728 elif gr is None and not can_create_repos():
729 729 msg = M(self, 'permission_denied_root', state)
730 730 raise formencode.Invalid(
731 731 msg, value, state, error_dict={'repo_type': msg}
732 732 )
733 733
734 734 return _validator
735 735
736 736
737 737 def ValidPerms(type_='repo'):
738 738 if type_ == 'repo_group':
739 739 EMPTY_PERM = 'group.none'
740 740 elif type_ == 'repo':
741 741 EMPTY_PERM = 'repository.none'
742 742 elif type_ == 'user_group':
743 743 EMPTY_PERM = 'usergroup.none'
744 744
745 745 class _validator(formencode.validators.FancyValidator):
746 746 messages = {
747 747 'perm_new_member_name':
748 748 _(u'This username or user group name is not valid')
749 749 }
750 750
751 751 def _to_python(self, value, state):
752 752 perm_updates = OrderedSet()
753 753 perm_additions = OrderedSet()
754 754 perm_deletions = OrderedSet()
755 755 # build a list of permission to update/delete and new permission
756 756
757 757 # Read the perm_new_member/perm_del_member attributes and group
758 758             # them by their IDs
759 759 new_perms_group = defaultdict(dict)
760 760 del_perms_group = defaultdict(dict)
761 761 for k, v in value.copy().iteritems():
762 762 if k.startswith('perm_del_member'):
763 763                     # delete from the original storage so we don't process it later
764 764 del value[k]
765 765 # part is `id`, `type`
766 766 _type, part = k.split('perm_del_member_')
767 767 args = part.split('_')
768 768 if len(args) == 2:
769 769 _key, pos = args
770 770 del_perms_group[pos][_key] = v
771 771 if k.startswith('perm_new_member'):
772 772                     # delete from the original storage so we don't process it later
773 773 del value[k]
774 774 # part is `id`, `type`, `perm`
775 775 _type, part = k.split('perm_new_member_')
776 776 args = part.split('_')
777 777 if len(args) == 2:
778 778 _key, pos = args
779 779 new_perms_group[pos][_key] = v
780 780
781 781 # store the deletes
782 782 for k in sorted(del_perms_group.keys()):
783 783 perm_dict = del_perms_group[k]
784 784 del_member = perm_dict.get('id')
785 785 del_type = perm_dict.get('type')
786 786 if del_member and del_type:
787 787 perm_deletions.add(
788 788 (del_member, None, del_type))
789 789
790 790 # store additions in order of how they were added in web form
791 791 for k in sorted(new_perms_group.keys()):
792 792 perm_dict = new_perms_group[k]
793 793 new_member = perm_dict.get('id')
794 794 new_type = perm_dict.get('type')
795 795 new_perm = perm_dict.get('perm')
796 796 if new_member and new_perm and new_type:
797 797 perm_additions.add(
798 798 (new_member, new_perm, new_type))
799 799
800 800 # get updates of permissions
801 801 # (read the existing radio button states)
802 802 default_user_id = User.get_default_user().user_id
803 803 for k, update_value in value.iteritems():
804 804 if k.startswith('u_perm_') or k.startswith('g_perm_'):
805 805 member = k[7:]
806 806 update_type = {'u': 'user',
807 807 'g': 'users_group'}[k[0]]
808 808
809 809 if safe_int(member) == default_user_id:
810 810 if str2bool(value.get('repo_private')):
811 811 # prevent from updating default user permissions
812 812 # when this repository is marked as private
813 813 update_value = EMPTY_PERM
814 814
815 815 perm_updates.add(
816 816 (member, update_value, update_type))
817 817
818 818 value['perm_additions'] = [] # propagated later
819 819 value['perm_updates'] = list(perm_updates)
820 820 value['perm_deletions'] = list(perm_deletions)
821 821
822 822 updates_map = dict(
823 823 (x[0], (x[1], x[2])) for x in value['perm_updates'])
824 824 # make sure Additions don't override updates.
825 825 for member_id, perm, member_type in list(perm_additions):
826 826 if member_id in updates_map:
827 827 perm = updates_map[member_id][0]
828 828 value['perm_additions'].append((member_id, perm, member_type))
829 829
830 830 # on new entries validate users they exist and they are active !
831 831 # this leaves feedback to the form
832 832 try:
833 833 if member_type == 'user':
834 834 User.query()\
835 835 .filter(User.active == true())\
836 836 .filter(User.user_id == member_id).one()
837 837 if member_type == 'users_group':
838 838 UserGroup.query()\
839 839 .filter(UserGroup.users_group_active == true())\
840 840 .filter(UserGroup.users_group_id == member_id)\
841 841 .one()
842 842
843 843 except Exception:
844 844 log.exception('Updated permission failed: org_exc:')
845 845                     msg = M(self, 'perm_new_member_name', state)
846 846 raise formencode.Invalid(
847 847 msg, value, state, error_dict={
848 848 'perm_new_member_name': msg}
849 849 )
850 850 return value
851 851 return _validator
852 852
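The form keys parsed above follow a positional naming scheme. A hedged example of the kind of POST data _to_python expects (field names inferred from the parsing code, values made up; actually running the validator also needs the application's database session for the default-user lookup):

    # two new members added through the web form, one existing entry updated
    form_value = {
        'perm_new_member_id_1': '5',
        'perm_new_member_type_1': 'user',
        'perm_new_member_perm_1': 'repository.read',
        'perm_new_member_id_2': '8',
        'perm_new_member_type_2': 'users_group',
        'perm_new_member_perm_2': 'repository.write',
        'u_perm_3': 'repository.admin',   # radio button state for user id 3
    }
    # after ValidPerms('repo')().to_python(form_value):
    #   perm_additions -> [('5', 'repository.read', 'user'),
    #                      ('8', 'repository.write', 'users_group')]
    #   perm_updates   -> [('3', 'repository.admin', 'user')]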
853 853
854 854 def ValidSettings():
855 855 class _validator(formencode.validators.FancyValidator):
856 856 def _to_python(self, value, state):
857 857 # settings form for users that are not admin
858 858 # can't edit certain parameters, it's extra backup if they mangle
859 859 # with forms
860 860
861 861 forbidden_params = [
862 862 'user', 'repo_type', 'repo_enable_locking',
863 863 'repo_enable_downloads', 'repo_enable_statistics'
864 864 ]
865 865
866 866 for param in forbidden_params:
867 867 if param in value:
868 868 del value[param]
869 869 return value
870 870
871 871 def validate_python(self, value, state):
872 872 pass
873 873 return _validator
874 874
875 875
876 876 def ValidPath():
877 877 class _validator(formencode.validators.FancyValidator):
878 878 messages = {
879 879 'invalid_path': _(u'This is not a valid path')
880 880 }
881 881
882 882 def validate_python(self, value, state):
883 883 if not os.path.isdir(value):
884 884 msg = M(self, 'invalid_path', state)
885 885 raise formencode.Invalid(
886 886 msg, value, state, error_dict={'paths_root_path': msg}
887 887 )
888 888 return _validator
889 889
890 890
891 891 def UniqSystemEmail(old_data={}):
892 892 class _validator(formencode.validators.FancyValidator):
893 893 messages = {
894 894 'email_taken': _(u'This e-mail address is already taken')
895 895 }
896 896
897 897 def _to_python(self, value, state):
898 898 return value.lower()
899 899
900 900 def validate_python(self, value, state):
901 901 if (old_data.get('email') or '').lower() != value:
902 902 user = User.get_by_email(value, case_insensitive=True)
903 903 if user:
904 904 msg = M(self, 'email_taken', state)
905 905 raise formencode.Invalid(
906 906 msg, value, state, error_dict={'email': msg}
907 907 )
908 908 return _validator
909 909
910 910
911 911 def ValidSystemEmail():
912 912 class _validator(formencode.validators.FancyValidator):
913 913 messages = {
914 914 'non_existing_email': _(u'e-mail "%(email)s" does not exist.')
915 915 }
916 916
917 917 def _to_python(self, value, state):
918 918 return value.lower()
919 919
920 920 def validate_python(self, value, state):
921 921 user = User.get_by_email(value, case_insensitive=True)
922 922 if user is None:
923 923 msg = M(self, 'non_existing_email', state, email=value)
924 924 raise formencode.Invalid(
925 925 msg, value, state, error_dict={'email': msg}
926 926 )
927 927
928 928 return _validator
929 929
930 930
931 931 def NotReviewedRevisions(repo_id):
932 932 class _validator(formencode.validators.FancyValidator):
933 933 messages = {
934 934 'rev_already_reviewed':
935 935 _(u'Revisions %(revs)s are already part of pull request '
936 936 u'or have set status'),
937 937 }
938 938
939 939 def validate_python(self, value, state):
940 940 # check revisions if they are not reviewed, or a part of another
941 941 # pull request
942 942 statuses = ChangesetStatus.query()\
943 943 .filter(ChangesetStatus.revision.in_(value))\
944 944 .filter(ChangesetStatus.repo_id == repo_id)\
945 945 .all()
946 946
947 947 errors = []
948 948 for status in statuses:
949 949 if status.pull_request_id:
950 950 errors.append(['pull_req', status.revision[:12]])
951 951 elif status.status:
952 952 errors.append(['status', status.revision[:12]])
953 953
954 954 if errors:
955 955 revs = ','.join([x[1] for x in errors])
956 956 msg = M(self, 'rev_already_reviewed', state, revs=revs)
957 957 raise formencode.Invalid(
958 958 msg, value, state, error_dict={'revisions': revs})
959 959
960 960 return _validator
961 961
962 962
963 963 def ValidIp():
964 964 class _validator(CIDR):
965 965 messages = {
966 966             'badFormat': _(u'Please enter a valid IPv4 or IPv6 address'),
967 967 'illegalBits': _(
968 968 u'The network size (bits) must be within the range '
969 969 u'of 0-32 (not %(bits)r)'),
970 970 }
971 971
972 972         # we override the default to_python() call
973 973 def to_python(self, value, state):
974 974 v = super(_validator, self).to_python(value, state)
975 v = v.strip()
975 v = safe_unicode(v.strip())
976 976 net = ipaddress.ip_network(address=v, strict=False)
977 977 return str(net)
978 978
979 979 def validate_python(self, value, state):
980 980 try:
981 addr = value.strip()
981 addr = safe_unicode(value.strip())
982 982                 # this raises a ValueError if the address is not IPv4 or IPv6
983 983 ipaddress.ip_network(addr, strict=False)
984 984 except ValueError:
985 985 raise formencode.Invalid(self.message('badFormat', state),
986 986 value, state)
987 987
988 988 return _validator
989 989
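A quick sketch of what the normalisation in to_python above does, using the ipaddress module directly (the validator itself also runs formencode's CIDR checks first):

    import ipaddress

    # strict=False masks the host bits instead of rejecting the value,
    # so a host address entered with a prefix collapses to its network
    net = ipaddress.ip_network(u'192.168.1.37/24', strict=False)
    print(str(net))   # 192.168.1.0/24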
990 990
991 991 def FieldKey():
992 992 class _validator(formencode.validators.FancyValidator):
993 993 messages = {
994 994 'badFormat': _(
995 995 u'Key name can only consist of letters, '
996 996 u'underscore, dash or numbers'),
997 997 }
998 998
999 999 def validate_python(self, value, state):
1000 1000 if not re.match('[a-zA-Z0-9_-]+$', value):
1001 1001 raise formencode.Invalid(self.message('badFormat', state),
1002 1002 value, state)
1003 1003 return _validator
1004 1004
1005 1005
1006 1006 def ValidAuthPlugins():
1007 1007 class _validator(formencode.validators.FancyValidator):
1008 1008 messages = {
1009 1009 'import_duplicate': _(
1010 1010 u'Plugins %(loaded)s and %(next_to_load)s '
1011 1011 u'both export the same name'),
1012 1012 'missing_includeme': _(
1013 1013 u'The plugin "%(plugin_id)s" is missing an includeme '
1014 1014 u'function.'),
1015 1015 'import_error': _(
1016 1016 u'Can not load plugin "%(plugin_id)s"'),
1017 1017 'no_plugin': _(
1018 1018 u'No plugin available with ID "%(plugin_id)s"'),
1019 1019 }
1020 1020
1021 1021 def _to_python(self, value, state):
1022 1022 # filter empty values
1023 1023 return filter(lambda s: s not in [None, ''], value)
1024 1024
1025 1025 def _validate_legacy_plugin_id(self, plugin_id, value, state):
1026 1026 """
1027 1027 Validates that the plugin import works. It also checks that the
1028 1028 plugin has an includeme attribute.
1029 1029 """
1030 1030 try:
1031 1031 plugin = _import_legacy_plugin(plugin_id)
1032 1032 except Exception as e:
1033 1033 log.exception(
1034 1034 'Exception during import of auth legacy plugin "{}"'
1035 1035 .format(plugin_id))
1036 1036 msg = M(self, 'import_error', plugin_id=plugin_id)
1037 1037 raise formencode.Invalid(msg, value, state)
1038 1038
1039 1039 if not hasattr(plugin, 'includeme'):
1040 1040 msg = M(self, 'missing_includeme', plugin_id=plugin_id)
1041 1041 raise formencode.Invalid(msg, value, state)
1042 1042
1043 1043 return plugin
1044 1044
1045 1045 def _validate_plugin_id(self, plugin_id, value, state):
1046 1046 """
1047 1047 Plugins are already imported during app start up. Therefore this
1048 1048 validation only retrieves the plugin from the plugin registry and
1049 1049 if it returns something not None everything is OK.
1050 1050 """
1051 1051 plugin = loadplugin(plugin_id)
1052 1052
1053 1053 if plugin is None:
1054 1054 msg = M(self, 'no_plugin', plugin_id=plugin_id)
1055 1055 raise formencode.Invalid(msg, value, state)
1056 1056
1057 1057 return plugin
1058 1058
1059 1059 def validate_python(self, value, state):
1060 1060 unique_names = {}
1061 1061 for plugin_id in value:
1062 1062
1063 1063 # Validate legacy or normal plugin.
1064 1064 if plugin_id.startswith(legacy_plugin_prefix):
1065 1065 plugin = self._validate_legacy_plugin_id(
1066 1066 plugin_id, value, state)
1067 1067 else:
1068 1068 plugin = self._validate_plugin_id(plugin_id, value, state)
1069 1069
1070 1070 # Only allow unique plugin names.
1071 1071 if plugin.name in unique_names:
1072 1072 msg = M(self, 'import_duplicate', state,
1073 1073 loaded=unique_names[plugin.name],
1074 1074 next_to_load=plugin)
1075 1075 raise formencode.Invalid(msg, value, state)
1076 1076 unique_names[plugin.name] = plugin
1077 1077
1078 1078 return _validator
1079 1079
1080 1080
1081 1081 def ValidPattern():
1082 1082
1083 1083 class _Validator(formencode.validators.FancyValidator):
1084 1084
1085 1085 def _to_python(self, value, state):
1086 1086 patterns = []
1087 1087
1088 1088 prefix = 'new_pattern'
1089 1089 for name, v in value.iteritems():
1090 1090 pattern_name = '_'.join((prefix, 'pattern'))
1091 1091 if name.startswith(pattern_name):
1092 1092 new_item_id = name[len(pattern_name)+1:]
1093 1093
1094 1094 def _field(name):
1095 1095 return '%s_%s_%s' % (prefix, name, new_item_id)
1096 1096
1097 1097 values = {
1098 1098                         'issuetracker_pat': value.get(_field('pattern')),
1100 1100 'issuetracker_url': value.get(_field('url')),
1101 1101 'issuetracker_pref': value.get(_field('prefix')),
1102 1102 'issuetracker_desc': value.get(_field('description'))
1103 1103 }
1104 1104 new_uid = md5(values['issuetracker_pat'])
1105 1105
1106 1106 has_required_fields = (
1107 1107 values['issuetracker_pat']
1108 1108 and values['issuetracker_url'])
1109 1109
1110 1110 if has_required_fields:
1111 1111 settings = [
1112 1112 ('_'.join((key, new_uid)), values[key], 'unicode')
1113 1113 for key in values]
1114 1114 patterns.append(settings)
1115 1115
1116 1116 value['patterns'] = patterns
1117 1117 delete_patterns = value.get('uid') or []
1118 1118 if not isinstance(delete_patterns, (list, tuple)):
1119 1119 delete_patterns = [delete_patterns]
1120 1120 value['delete_patterns'] = delete_patterns
1121 1121 return value
1122 1122 return _Validator
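Similar to the permission validator, the issue tracker pattern form is flattened into positional fields. A hedged sketch of the expected input, with field names inferred from the _field() helper above and example values that are purely illustrative:

    form_value = {
        'new_pattern_pattern_0': r'#(?P<issue_id>\d+)',
        'new_pattern_url_0': 'https://issues.example.com/${issue_id}',
        'new_pattern_prefix_0': '#',
        'new_pattern_description_0': 'Example tracker',
        'uid': [],   # existing pattern uids selected for deletion, if any
    }
    # ValidPattern()().to_python(form_value) would add a 'patterns' entry where
    # each setting key is suffixed with the md5 of the pattern, e.g.
    # ('issuetracker_pat_<md5>', '#(?P<issue_id>\\d+)', 'unicode')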
@@ -1,323 +1,323 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22 import mock
23 23
24 24 from rhodecode.lib.utils2 import safe_unicode
25 25 from rhodecode.model.db import (
26 26 true, User, UserGroup, UserGroupMember, UserEmailMap, Permission, UserIpMap)
27 27 from rhodecode.model.meta import Session
28 28 from rhodecode.model.user import UserModel
29 29 from rhodecode.model.user_group import UserGroupModel
30 30 from rhodecode.model.repo import RepoModel
31 31 from rhodecode.model.repo_group import RepoGroupModel
32 32 from rhodecode.tests.fixture import Fixture
33 33
34 34 fixture = Fixture()
35 35
36 36
37 37 class TestGetUsers(object):
38 38 def test_returns_active_users(self, backend, user_util):
39 39 for i in range(4):
40 40 is_active = i % 2 == 0
41 41 user_util.create_user(active=is_active, lastname='Fake user')
42 42
43 43 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
44 44 users = UserModel().get_users()
45 45 fake_users = [u for u in users if u['last_name'] == 'Fake user']
46 46 assert len(fake_users) == 2
47 47
48 48 expected_keys = (
49 49 'id', 'first_name', 'last_name', 'username', 'icon_link',
50 50 'value_display', 'value', 'value_type')
51 51 for user in users:
52 52             assert user['value_type'] == 'user'
53 53 for key in expected_keys:
54 54 assert key in user
55 55
56 56 def test_returns_user_filtered_by_last_name(self, backend, user_util):
57 57         keywords = ('aBc', u'ünicode')
58 58 for keyword in keywords:
59 59 for i in range(2):
60 60 user_util.create_user(
61 61 active=True, lastname=u'Fake {} user'.format(keyword))
62 62
63 63 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
64 64 keyword = keywords[1].lower()
65 65 users = UserModel().get_users(name_contains=keyword)
66 66
67 67 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
68 68 assert len(fake_users) == 2
69 69 for user in fake_users:
70 70             assert user['last_name'] == safe_unicode('Fake ünicode user')
71 71
72 72 def test_returns_user_filtered_by_first_name(self, backend, user_util):
73 73 created_users = []
74 74         keywords = ('aBc', u'ünicode')
75 75 for keyword in keywords:
76 76 for i in range(2):
77 77 created_users.append(user_util.create_user(
78 78 active=True, lastname='Fake user',
79 79 firstname=u'Fake {} user'.format(keyword)))
80 80
81 81 keyword = keywords[1].lower()
82 82 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
83 83 users = UserModel().get_users(name_contains=keyword)
84 84
85 85 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
86 86 assert len(fake_users) == 2
87 87 for user in fake_users:
88 88             assert user['first_name'] == safe_unicode('Fake ünicode user')
89 89
90 90 def test_returns_user_filtered_by_username(self, backend, user_util):
91 91 created_users = []
92 92 for i in range(5):
93 93 created_users.append(user_util.create_user(
94 94 active=True, lastname='Fake user'))
95 95
96 96 user_filter = created_users[-1].username[-2:]
97 97 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
98 98 users = UserModel().get_users(name_contains=user_filter)
99 99
100 100 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
101 101 assert len(fake_users) == 1
102 102 assert fake_users[0]['username'] == created_users[-1].username
103 103
104 104 def test_returns_limited_user_list(self, backend, user_util):
105 105 created_users = []
106 106 for i in range(5):
107 107 created_users.append(user_util.create_user(
108 108 active=True, lastname='Fake user'))
109 109
110 110 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
111 111 users = UserModel().get_users(name_contains='Fake', limit=3)
112 112
113 113 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
114 114 assert len(fake_users) == 3
115 115
116 116
117 117 @pytest.fixture
118 118 def test_user(request, pylonsapp):
119 119 usr = UserModel().create_or_update(
120 120 username=u'test_user',
121 121 password=u'qweqwe',
122 122 email=u'main_email@rhodecode.org',
123 123 firstname=u'u1', lastname=u'u1')
124 124 Session().commit()
125 125 assert User.get_by_username(u'test_user') == usr
126 126
127 127 @request.addfinalizer
128 128 def cleanup():
129 129 if UserModel().get_user(usr.user_id) is None:
130 130 return
131 131
132 132 perm = Permission.query().all()
133 133 for p in perm:
134 134 UserModel().revoke_perm(usr, p)
135 135
136 136 UserModel().delete(usr.user_id)
137 137 Session().commit()
138 138
139 139 return usr
140 140
141 141
142 142 def test_create_and_remove(test_user):
143 143 usr = test_user
144 144
145 145 # make user group
146 146 user_group = fixture.create_user_group('some_example_group')
147 147 Session().commit()
148 148
149 149 UserGroupModel().add_user_to_group(user_group, usr)
150 150 Session().commit()
151 151
152 152 assert UserGroup.get(user_group.users_group_id) == user_group
153 153 assert UserGroupMember.query().count() == 1
154 154 UserModel().delete(usr.user_id)
155 155 Session().commit()
156 156
157 157 assert UserGroupMember.query().all() == []
158 158
159 159
160 160 def test_additional_email_as_main(test_user):
161 161 with pytest.raises(AttributeError):
162 162 m = UserEmailMap()
163 163 m.email = test_user.email
164 164 m.user = test_user
165 165 Session().add(m)
166 166 Session().commit()
167 167
168 168
169 169 def test_extra_email_map(test_user):
170 170
171 171 m = UserEmailMap()
172 172 m.email = u'main_email2@rhodecode.org'
173 173 m.user = test_user
174 174 Session().add(m)
175 175 Session().commit()
176 176
177 177 u = User.get_by_email(email='main_email@rhodecode.org')
178 178 assert test_user.user_id == u.user_id
179 179 assert test_user.username == u.username
180 180
181 181 u = User.get_by_email(email='main_email2@rhodecode.org')
182 182 assert test_user.user_id == u.user_id
183 183 assert test_user.username == u.username
184 184 u = User.get_by_email(email='main_email3@rhodecode.org')
185 185 assert u is None
186 186
187 187
188 188 def test_get_api_data_replaces_secret_data_by_default(test_user):
189 189 api_data = test_user.get_api_data()
190 190 api_key_length = 40
191 191 expected_replacement = '*' * api_key_length
192 192
193 193 for key in api_data['api_keys']:
194 194 assert key == expected_replacement
195 195
196 196
197 197 def test_get_api_data_includes_secret_data_if_activated(test_user):
198 198 api_data = test_user.get_api_data(include_secrets=True)
199 199 assert api_data['api_keys'] == test_user.auth_tokens
200 200
201 201
202 202 def test_add_perm(test_user):
203 203 perm = Permission.query().all()[0]
204 204 UserModel().grant_perm(test_user, perm)
205 205 Session().commit()
206 206 assert UserModel().has_perm(test_user, perm)
207 207
208 208
209 209 def test_has_perm(test_user):
210 210 perm = Permission.query().all()
211 211 for p in perm:
212 212 assert not UserModel().has_perm(test_user, p)
213 213
214 214
215 215 def test_revoke_perm(test_user):
216 216 perm = Permission.query().all()[0]
217 217 UserModel().grant_perm(test_user, perm)
218 218 Session().commit()
219 219 assert UserModel().has_perm(test_user, perm)
220 220
221 221 # revoke
222 222 UserModel().revoke_perm(test_user, perm)
223 223 Session().commit()
224 224 assert not UserModel().has_perm(test_user, perm)
225 225
226 226
227 227 @pytest.mark.parametrize("ip_range, expected, expect_errors", [
228 228 ('', [], False),
229 229 ('127.0.0.1', ['127.0.0.1'], False),
230 230 ('127.0.0.1,127.0.0.2', ['127.0.0.1', '127.0.0.2'], False),
231 231 ('127.0.0.1 , 127.0.0.2', ['127.0.0.1', '127.0.0.2'], False),
232 232 (
233 233 '127.0.0.1,172.172.172.0,127.0.0.2',
234 234 ['127.0.0.1', '172.172.172.0', '127.0.0.2'], False),
235 235 (
236 236 '127.0.0.1-127.0.0.5',
237 237 ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4', '127.0.0.5'],
238 238 False),
239 239 (
240 240 '127.0.0.1 - 127.0.0.5',
241 241 ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4', '127.0.0.5'],
242 242 False
243 243 ),
244 244 ('-', [], True),
245 245 ('127.0.0.1-32', [], True),
246 246 (
247 247 '127.0.0.1,127.0.0.1,127.0.0.1,127.0.0.1-127.0.0.2,127.0.0.2',
248 248 ['127.0.0.1', '127.0.0.2'], False),
249 249 (
250 250 '127.0.0.1-127.0.0.2,127.0.0.4-127.0.0.6,',
251 251 ['127.0.0.1', '127.0.0.2', '127.0.0.4', '127.0.0.5', '127.0.0.6'],
252 252 False
253 253 ),
254 254 (
255 255 '127.0.0.1-127.0.0.2,127.0.0.1-127.0.0.6,',
256 256 ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4', '127.0.0.5',
257 257 '127.0.0.6'],
258 258 False
259 259 ),
260 260 ])
261 261 def test_ip_range_generator(ip_range, expected, expect_errors):
262 262 func = UserModel().parse_ip_range
263 263 if expect_errors:
264 pytest.raises(Exception, func, ip_range)
264 pytest.raises(ValueError, func, ip_range)
265 265 else:
266 266 parsed_list = func(ip_range)
267 267 assert parsed_list == expected
268 268
269 269
270 270 def test_user_delete_cascades_ip_whitelist(test_user):
271 271 sample_ip = '1.1.1.1'
272 272 uid_map = UserIpMap(user_id=test_user.user_id, ip_addr=sample_ip)
273 273 Session().add(uid_map)
274 274 Session().delete(test_user)
275 275 try:
276 276 Session().flush()
277 277 finally:
278 278 Session().rollback()
279 279
280 280
281 281 def test_account_for_deactivation_generation(test_user):
282 282 accounts = UserModel().get_accounts_in_creation_order(
283 283 current_user=test_user)
284 284 # current user should be #1 in the list
285 285 assert accounts[0] == test_user.user_id
286 286 active_users = User.query().filter(User.active == true()).count()
287 287 assert active_users == len(accounts)
288 288
289 289
290 290 def test_user_delete_cascades_permissions_on_repo(backend, test_user):
291 291 test_repo = backend.create_repo()
292 292 RepoModel().grant_user_permission(
293 293 test_repo, test_user, 'repository.write')
294 294 Session().commit()
295 295
296 296 assert test_user.repo_to_perm
297 297
298 298 UserModel().delete(test_user)
299 299 Session().commit()
300 300
301 301
302 302 def test_user_delete_cascades_permissions_on_repo_group(
303 303 test_repo_group, test_user):
304 304 RepoGroupModel().grant_user_permission(
305 305 test_repo_group, test_user, 'group.write')
306 306 Session().commit()
307 307
308 308 assert test_user.repo_group_to_perm
309 309
310 310 Session().delete(test_user)
311 311 Session().commit()
312 312
313 313
314 314 def test_user_delete_cascades_permissions_on_user_group(
315 315 test_user_group, test_user):
316 316 UserGroupModel().grant_user_permission(
317 317 test_user_group, test_user, 'usergroup.write')
318 318 Session().commit()
319 319
320 320 assert test_user.user_group_to_perm
321 321
322 322 Session().delete(test_user)
323 323 Session().commit()
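
The only functional change in the test module above is at line 264, where the expected failure for a malformed IP range is narrowed from a bare Exception to ValueError, so unrelated errors inside the parser can no longer satisfy the test. A minimal sketch of the same assertion pattern, using a hypothetical stand-in for UserModel().parse_ip_range:

    import pytest


    def parse_ip_range(ip_range):
        # Hypothetical stand-in for UserModel().parse_ip_range, reduced to the
        # error path only: malformed input such as '-' or '127.0.0.1-32' must
        # raise ValueError (range expansion is omitted here).
        parts = [p.strip() for p in ip_range.split(',') if p.strip()]
        for part in parts:
            start, sep, end = part.partition('-')
            if sep and (not start or end.count('.') != 3):
                raise ValueError('invalid IP range: %r' % part)
        return parts


    def test_invalid_range_raises_value_error():
        # Pinning the concrete exception type keeps the test from passing on
        # unrelated failures (e.g. an AttributeError inside the parser).
        with pytest.raises(ValueError):
            parse_ip_range('-')
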
@@ -1,257 +1,256 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 # Import early to make sure things are patched up properly
22 22 from setuptools import setup, find_packages
23 23
24 24 import os
25 25 import sys
26 26 import pkgutil
27 27 import platform
28 28
29 29 from pip.download import PipSession
30 30 from pip.req import parse_requirements
31 31
32 32 from codecs import open
33 33
34 34
35 35 if sys.version_info < (2, 7):
36 36 raise Exception('RhodeCode requires Python 2.7 or later')
37 37
38 38 here = os.path.abspath(os.path.dirname(__file__))
39 39
40 40 # defines current platform
41 41 __platform__ = platform.system()
42 42 __license__ = 'AGPLv3, and Commercial License'
43 43 __author__ = 'RhodeCode GmbH'
44 44 __url__ = 'https://code.rhodecode.com'
45 45 is_windows = __platform__ in ('Windows',)
46 46
47 47
48 48 def _get_requirements(req_filename, exclude=None, extras=None):
49 49 extras = extras or []
50 50 exclude = exclude or []
51 51
52 52 try:
53 53 parsed = parse_requirements(
54 54 os.path.join(here, req_filename), session=PipSession())
55 55 except TypeError:
56 56         # try pip < 6.0.0, which doesn't support the session argument
57 57 parsed = parse_requirements(os.path.join(here, req_filename))
58 58
59 59 requirements = []
60 60 for ir in parsed:
61 61 if ir.req and ir.name not in exclude:
62 62 requirements.append(str(ir.req))
63 63 return requirements + extras
64 64
65 65
66 66 # requirements extract
67 67 setup_requirements = ['PasteScript', 'pytest-runner']
68 68 install_requirements = _get_requirements(
69 69 'requirements.txt', exclude=['setuptools'])
70 70 test_requirements = _get_requirements(
71 71 'requirements_test.txt', extras=['configobj'])
72 72
73 73 install_requirements = [
74 74 'Babel',
75 75 'Beaker',
76 76 'FormEncode',
77 77 'Mako',
78 78 'Markdown',
79 79 'MarkupSafe',
80 80 'MySQL-python',
81 81 'Paste',
82 82 'PasteDeploy',
83 83 'PasteScript',
84 84 'Pygments',
85 85 'pygments-markdown-lexer',
86 86 'Pylons',
87 87 'Routes',
88 88 'SQLAlchemy',
89 89 'Tempita',
90 90 'URLObject',
91 91 'WebError',
92 92 'WebHelpers',
93 93 'WebHelpers2',
94 94 'WebOb',
95 95 'WebTest',
96 96 'Whoosh',
97 97 'alembic',
98 98 'amqplib',
99 99 'anyjson',
100 100 'appenlight-client',
101 101 'authomatic',
102 'backport_ipaddress',
103 102 'cssselect',
104 103 'celery',
105 104 'channelstream',
106 105 'colander',
107 106 'decorator',
108 107 'deform',
109 108 'docutils',
110 109 'gevent',
111 110 'gunicorn',
112 111 'infrae.cache',
113 112 'ipython',
114 113 'iso8601',
115 114 'kombu',
116 115 'lxml',
117 116 'msgpack-python',
118 117 'nbconvert',
119 118 'packaging',
120 119 'psycopg2',
121 120 'py-gfm',
122 121 'pycrypto',
123 122 'pycurl',
124 123 'pyparsing',
125 124 'pyramid',
126 125 'pyramid-debugtoolbar',
127 126 'pyramid-mako',
128 127 'pyramid-beaker',
129 128 'pysqlite',
130 129 'python-dateutil',
131 130 'python-ldap',
132 131 'python-memcached',
133 132 'python-pam',
134 133 'recaptcha-client',
135 134 'repoze.lru',
136 135 'requests',
137 136 'simplejson',
138 137 'subprocess32',
139 138 'waitress',
140 139 'zope.cachedescriptors',
141 140 'dogpile.cache',
142 141 'dogpile.core',
143 142 'psutil',
144 143 'py-bcrypt',
145 144 ]
146 145
147 146
148 147 def get_version():
149 148 version = pkgutil.get_data('rhodecode', 'VERSION')
150 149 return version.strip()
151 150
152 151
153 152 # additional files that go into the package itself
154 153 package_data = {
155 154 '': ['*.txt', '*.rst'],
156 155 'configs': ['*.ini'],
157 156 'rhodecode': ['VERSION', 'i18n/*/LC_MESSAGES/*.mo', ],
158 157 }
159 158
160 159 description = 'Source Code Management Platform'
161 160 keywords = ' '.join([
162 161 'rhodecode', 'mercurial', 'git', 'svn',
163 162 'code review',
164 163 'repo groups', 'ldap', 'repository management', 'hgweb',
165 164 'hgwebdir', 'gitweb', 'serving hgweb',
166 165 ])
167 166
168 167
169 168 # README/DESCRIPTION generation
170 169 readme_file = 'README.rst'
171 170 changelog_file = 'CHANGES.rst'
172 171 try:
173 172 long_description = open(readme_file).read() + '\n\n' + \
174 173 open(changelog_file).read()
175 174 except IOError as err:
176 175 sys.stderr.write(
177 176         "[WARNING] Cannot find file specified as long_description (%s) "
178 177         "or changelog (%s), skipping that file\n" % (readme_file, changelog_file))
179 178 long_description = description
180 179
181 180
182 181 setup(
183 182 name='rhodecode-enterprise-ce',
184 183 version=get_version(),
185 184 description=description,
186 185 long_description=long_description,
187 186 keywords=keywords,
188 187 license=__license__,
189 188 author=__author__,
190 189 author_email='marcin@rhodecode.com',
191 190 url=__url__,
192 191 setup_requires=setup_requirements,
193 192 install_requires=install_requirements,
194 193 tests_require=test_requirements,
195 194 zip_safe=False,
196 195 packages=find_packages(exclude=["docs", "tests*"]),
197 196 package_data=package_data,
198 197 include_package_data=True,
199 198 classifiers=[
200 199 'Development Status :: 6 - Mature',
201 200 'Environment :: Web Environment',
202 201 'Intended Audience :: Developers',
203 202 'Operating System :: OS Independent',
204 203 'Topic :: Software Development :: Version Control',
205 204 'License :: OSI Approved :: Affero GNU General Public License v3 or later (AGPLv3+)',
206 205 'Programming Language :: Python :: 2.7',
207 206 ],
208 207 message_extractors={
209 208 'rhodecode': [
210 209 ('**.py', 'python', None),
211 210 ('**.js', 'javascript', None),
212 211 ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
213 212 ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}),
214 213 ('public/**', 'ignore', None),
215 214 ]
216 215 },
217 216 paster_plugins=['PasteScript', 'Pylons'],
218 217 entry_points={
219 218 'enterprise.plugins1': [
220 219 'crowd=rhodecode.authentication.plugins.auth_crowd:plugin_factory',
221 220 'headers=rhodecode.authentication.plugins.auth_headers:plugin_factory',
222 221 'jasig_cas=rhodecode.authentication.plugins.auth_jasig_cas:plugin_factory',
223 222 'ldap=rhodecode.authentication.plugins.auth_ldap:plugin_factory',
224 223 'pam=rhodecode.authentication.plugins.auth_pam:plugin_factory',
225 224 'rhodecode=rhodecode.authentication.plugins.auth_rhodecode:plugin_factory',
226 225 'token=rhodecode.authentication.plugins.auth_token:plugin_factory',
227 226 ],
228 227 'paste.app_factory': [
229 228 'main=rhodecode.config.middleware:make_pyramid_app',
230 229 'pylons=rhodecode.config.middleware:make_app',
231 230 ],
232 231 'paste.app_install': [
233 232 'main=pylons.util:PylonsInstaller',
234 233 'pylons=pylons.util:PylonsInstaller',
235 234 ],
236 235 'paste.global_paster_command': [
237 236 'make-config=rhodecode.lib.paster_commands.make_config:Command',
238 237 'setup-rhodecode=rhodecode.lib.paster_commands.setup_rhodecode:Command',
239 238 'update-repoinfo=rhodecode.lib.paster_commands.update_repoinfo:Command',
240 239 'cache-keys=rhodecode.lib.paster_commands.cache_keys:Command',
241 240 'ishell=rhodecode.lib.paster_commands.ishell:Command',
242 241 'upgrade-db=rhodecode.lib.dbmigrate:UpgradeDb',
243 242 'celeryd=rhodecode.lib.celerypylons.commands:CeleryDaemonCommand',
244 243 ],
245 244 'pytest11': [
246 245 'pylons=rhodecode.tests.pylons_plugin',
247 246 'enterprise=rhodecode.tests.plugin',
248 247 ],
249 248 'console_scripts': [
250 249 'rcserver=rhodecode.rcserver:main',
251 250 ],
252 251 'beaker.backends': [
253 252 'memorylru_base=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerBase',
254 253 'memorylru_debug=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerDebug'
255 254 ]
256 255 },
257 256 )
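
One caveat on the setup.py above: _get_requirements imports pip.req.parse_requirements and pip.download.PipSession, which are internal pip APIs that were removed in pip 10, so the TypeError fallback only covers very old pip releases. A dependency-free alternative is sketched below; it is an assumed replacement for illustration, not part of this commit, and the name splitting is deliberately rough:

    import os

    here = os.path.abspath(os.path.dirname(__file__))


    def _read_requirements(req_filename, exclude=None, extras=None):
        # Dependency-free sketch: read the requirements file line by line,
        # skip comments and editable/include lines, drop excluded names.
        exclude = set(exclude or [])
        extras = extras or []
        requirements = []
        with open(os.path.join(here, req_filename)) as req_file:
            for line in req_file:
                line = line.strip()
                if not line or line.startswith(('#', '-e ', '-r ')):
                    continue
                name = line.split('==')[0].split('>=')[0].split('<')[0].strip()
                if name not in exclude:
                    requirements.append(line)
        return requirements + extras
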