dependencies: bumped pyramid to 1.9, webob to 1.7.3 and webtest to 2.0.27...
marcink
r1906:1eaf71e3 default
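The generated set below references self, super, pkgs and fetchurl as free variables, so it cannot be imported as an ordinary Nix function on its own. A minimal sketch of one way such a pip2nix-generated attrset can be evaluated is shown here, assuming builtins.scopedImport and the python27Packages base set from nixpkgs; the wrapper layout and the ./python-packages.nix path are illustrative assumptions, not taken from this repository.

# hedged sketch: evaluating a pip2nix-generated set like the one in this diff
# (the wiring below is an assumption; scopedImport and python27Packages are
#  standard Nix/nixpkgs facilities)
{ pkgs ? import <nixpkgs> {} }:

let
  basePythonPackages = pkgs.python27Packages;

  # bring the free variables (self, super, pkgs, fetchurl) into scope
  generated = builtins.scopedImport {
    self = generated;               # entries may refer to each other via self
    super = basePythonPackages;
    inherit pkgs;
    inherit (pkgs) fetchurl;
  } ./python-packages.nix;          # the file changed in this diff

in
  # after this change, WebTest 2.0.27 pulls in six, waitress and beautifulsoup4
  generated.WebTest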
@@ -1,2034 +1,2086 @@
1 # Generated by pip2nix 0.4.0
1 # Generated by pip2nix 0.4.0
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 {
4 {
5 Babel = super.buildPythonPackage {
5 Babel = super.buildPythonPackage {
6 name = "Babel-1.3";
6 name = "Babel-1.3";
7 buildInputs = with self; [];
7 buildInputs = with self; [];
8 doCheck = false;
8 doCheck = false;
9 propagatedBuildInputs = with self; [pytz];
9 propagatedBuildInputs = with self; [pytz];
10 src = fetchurl {
10 src = fetchurl {
11 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
11 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
12 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
12 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
13 };
13 };
14 meta = {
14 meta = {
15 license = [ pkgs.lib.licenses.bsdOriginal ];
15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 };
16 };
17 };
17 };
18 Beaker = super.buildPythonPackage {
18 Beaker = super.buildPythonPackage {
19 name = "Beaker-1.9.0";
19 name = "Beaker-1.9.0";
20 buildInputs = with self; [];
20 buildInputs = with self; [];
21 doCheck = false;
21 doCheck = false;
22 propagatedBuildInputs = with self; [funcsigs];
22 propagatedBuildInputs = with self; [funcsigs];
23 src = fetchurl {
23 src = fetchurl {
24 url = "https://pypi.python.org/packages/93/b2/12de6937b06e9615dbb3cb3a1c9af17f133f435bdef59f4ad42032b6eb49/Beaker-1.9.0.tar.gz";
24 url = "https://pypi.python.org/packages/93/b2/12de6937b06e9615dbb3cb3a1c9af17f133f435bdef59f4ad42032b6eb49/Beaker-1.9.0.tar.gz";
25 md5 = "38b3fcdfa24faf97c6cf66991eb54e9c";
25 md5 = "38b3fcdfa24faf97c6cf66991eb54e9c";
26 };
26 };
27 meta = {
27 meta = {
28 license = [ pkgs.lib.licenses.bsdOriginal ];
28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 };
29 };
30 };
30 };
31 CProfileV = super.buildPythonPackage {
31 CProfileV = super.buildPythonPackage {
32 name = "CProfileV-1.0.7";
32 name = "CProfileV-1.0.7";
33 buildInputs = with self; [];
33 buildInputs = with self; [];
34 doCheck = false;
34 doCheck = false;
35 propagatedBuildInputs = with self; [bottle];
35 propagatedBuildInputs = with self; [bottle];
36 src = fetchurl {
36 src = fetchurl {
37 url = "https://pypi.python.org/packages/df/50/d8c1ada7d537c64b0f76453fa31dedb6af6e27b82fcf0331e5f71a4cf98b/CProfileV-1.0.7.tar.gz";
37 url = "https://pypi.python.org/packages/df/50/d8c1ada7d537c64b0f76453fa31dedb6af6e27b82fcf0331e5f71a4cf98b/CProfileV-1.0.7.tar.gz";
38 md5 = "db4c7640438aa3d8887e194c81c7a019";
38 md5 = "db4c7640438aa3d8887e194c81c7a019";
39 };
39 };
40 meta = {
40 meta = {
41 license = [ pkgs.lib.licenses.mit ];
41 license = [ pkgs.lib.licenses.mit ];
42 };
42 };
43 };
43 };
44 Chameleon = super.buildPythonPackage {
44 Chameleon = super.buildPythonPackage {
45 name = "Chameleon-2.24";
45 name = "Chameleon-2.24";
46 buildInputs = with self; [];
46 buildInputs = with self; [];
47 doCheck = false;
47 doCheck = false;
48 propagatedBuildInputs = with self; [];
48 propagatedBuildInputs = with self; [];
49 src = fetchurl {
49 src = fetchurl {
50 url = "https://pypi.python.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
50 url = "https://pypi.python.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
51 md5 = "1b01f1f6533a8a11d0d2f2366dec5342";
51 md5 = "1b01f1f6533a8a11d0d2f2366dec5342";
52 };
52 };
53 meta = {
53 meta = {
54 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
54 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
55 };
55 };
56 };
56 };
57 FormEncode = super.buildPythonPackage {
57 FormEncode = super.buildPythonPackage {
58 name = "FormEncode-1.2.4";
58 name = "FormEncode-1.2.4";
59 buildInputs = with self; [];
59 buildInputs = with self; [];
60 doCheck = false;
60 doCheck = false;
61 propagatedBuildInputs = with self; [];
61 propagatedBuildInputs = with self; [];
62 src = fetchurl {
62 src = fetchurl {
63 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
63 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
64 md5 = "6bc17fb9aed8aea198975e888e2077f4";
64 md5 = "6bc17fb9aed8aea198975e888e2077f4";
65 };
65 };
66 meta = {
66 meta = {
67 license = [ pkgs.lib.licenses.psfl ];
67 license = [ pkgs.lib.licenses.psfl ];
68 };
68 };
69 };
69 };
70 Jinja2 = super.buildPythonPackage {
70 Jinja2 = super.buildPythonPackage {
71 name = "Jinja2-2.7.3";
71 name = "Jinja2-2.7.3";
72 buildInputs = with self; [];
72 buildInputs = with self; [];
73 doCheck = false;
73 doCheck = false;
74 propagatedBuildInputs = with self; [MarkupSafe];
74 propagatedBuildInputs = with self; [MarkupSafe];
75 src = fetchurl {
75 src = fetchurl {
76 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
76 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
77 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
77 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
78 };
78 };
79 meta = {
79 meta = {
80 license = [ pkgs.lib.licenses.bsdOriginal ];
80 license = [ pkgs.lib.licenses.bsdOriginal ];
81 };
81 };
82 };
82 };
83 Mako = super.buildPythonPackage {
83 Mako = super.buildPythonPackage {
84 name = "Mako-1.0.6";
84 name = "Mako-1.0.6";
85 buildInputs = with self; [];
85 buildInputs = with self; [];
86 doCheck = false;
86 doCheck = false;
87 propagatedBuildInputs = with self; [MarkupSafe];
87 propagatedBuildInputs = with self; [MarkupSafe];
88 src = fetchurl {
88 src = fetchurl {
89 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
89 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
90 md5 = "a28e22a339080316b2acc352b9ee631c";
90 md5 = "a28e22a339080316b2acc352b9ee631c";
91 };
91 };
92 meta = {
92 meta = {
93 license = [ pkgs.lib.licenses.mit ];
93 license = [ pkgs.lib.licenses.mit ];
94 };
94 };
95 };
95 };
96 Markdown = super.buildPythonPackage {
96 Markdown = super.buildPythonPackage {
97 name = "Markdown-2.6.8";
97 name = "Markdown-2.6.8";
98 buildInputs = with self; [];
98 buildInputs = with self; [];
99 doCheck = false;
99 doCheck = false;
100 propagatedBuildInputs = with self; [];
100 propagatedBuildInputs = with self; [];
101 src = fetchurl {
101 src = fetchurl {
102 url = "https://pypi.python.org/packages/1d/25/3f6d2cb31ec42ca5bd3bfbea99b63892b735d76e26f20dd2dcc34ffe4f0d/Markdown-2.6.8.tar.gz";
102 url = "https://pypi.python.org/packages/1d/25/3f6d2cb31ec42ca5bd3bfbea99b63892b735d76e26f20dd2dcc34ffe4f0d/Markdown-2.6.8.tar.gz";
103 md5 = "d9ef057a5bd185f6f536400a31fc5d45";
103 md5 = "d9ef057a5bd185f6f536400a31fc5d45";
104 };
104 };
105 meta = {
105 meta = {
106 license = [ pkgs.lib.licenses.bsdOriginal ];
106 license = [ pkgs.lib.licenses.bsdOriginal ];
107 };
107 };
108 };
108 };
109 MarkupSafe = super.buildPythonPackage {
109 MarkupSafe = super.buildPythonPackage {
110 name = "MarkupSafe-0.23";
110 name = "MarkupSafe-0.23";
111 buildInputs = with self; [];
111 buildInputs = with self; [];
112 doCheck = false;
112 doCheck = false;
113 propagatedBuildInputs = with self; [];
113 propagatedBuildInputs = with self; [];
114 src = fetchurl {
114 src = fetchurl {
115 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
115 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
116 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
116 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
117 };
117 };
118 meta = {
118 meta = {
119 license = [ pkgs.lib.licenses.bsdOriginal ];
119 license = [ pkgs.lib.licenses.bsdOriginal ];
120 };
120 };
121 };
121 };
122 MySQL-python = super.buildPythonPackage {
122 MySQL-python = super.buildPythonPackage {
123 name = "MySQL-python-1.2.5";
123 name = "MySQL-python-1.2.5";
124 buildInputs = with self; [];
124 buildInputs = with self; [];
125 doCheck = false;
125 doCheck = false;
126 propagatedBuildInputs = with self; [];
126 propagatedBuildInputs = with self; [];
127 src = fetchurl {
127 src = fetchurl {
128 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
128 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
129 md5 = "654f75b302db6ed8dc5a898c625e030c";
129 md5 = "654f75b302db6ed8dc5a898c625e030c";
130 };
130 };
131 meta = {
131 meta = {
132 license = [ pkgs.lib.licenses.gpl1 ];
132 license = [ pkgs.lib.licenses.gpl1 ];
133 };
133 };
134 };
134 };
135 Paste = super.buildPythonPackage {
135 Paste = super.buildPythonPackage {
136 name = "Paste-2.0.3";
136 name = "Paste-2.0.3";
137 buildInputs = with self; [];
137 buildInputs = with self; [];
138 doCheck = false;
138 doCheck = false;
139 propagatedBuildInputs = with self; [six];
139 propagatedBuildInputs = with self; [six];
140 src = fetchurl {
140 src = fetchurl {
141 url = "https://pypi.python.org/packages/30/c3/5c2f7c7a02e4f58d4454353fa1c32c94f79fa4e36d07a67c0ac295ea369e/Paste-2.0.3.tar.gz";
141 url = "https://pypi.python.org/packages/30/c3/5c2f7c7a02e4f58d4454353fa1c32c94f79fa4e36d07a67c0ac295ea369e/Paste-2.0.3.tar.gz";
142 md5 = "1231e14eae62fa7ed76e9130b04bc61e";
142 md5 = "1231e14eae62fa7ed76e9130b04bc61e";
143 };
143 };
144 meta = {
144 meta = {
145 license = [ pkgs.lib.licenses.mit ];
145 license = [ pkgs.lib.licenses.mit ];
146 };
146 };
147 };
147 };
148 PasteDeploy = super.buildPythonPackage {
148 PasteDeploy = super.buildPythonPackage {
149 name = "PasteDeploy-1.5.2";
149 name = "PasteDeploy-1.5.2";
150 buildInputs = with self; [];
150 buildInputs = with self; [];
151 doCheck = false;
151 doCheck = false;
152 propagatedBuildInputs = with self; [];
152 propagatedBuildInputs = with self; [];
153 src = fetchurl {
153 src = fetchurl {
154 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
154 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
155 md5 = "352b7205c78c8de4987578d19431af3b";
155 md5 = "352b7205c78c8de4987578d19431af3b";
156 };
156 };
157 meta = {
157 meta = {
158 license = [ pkgs.lib.licenses.mit ];
158 license = [ pkgs.lib.licenses.mit ];
159 };
159 };
160 };
160 };
161 PasteScript = super.buildPythonPackage {
161 PasteScript = super.buildPythonPackage {
162 name = "PasteScript-1.7.5";
162 name = "PasteScript-1.7.5";
163 buildInputs = with self; [];
163 buildInputs = with self; [];
164 doCheck = false;
164 doCheck = false;
165 propagatedBuildInputs = with self; [Paste PasteDeploy];
165 propagatedBuildInputs = with self; [Paste PasteDeploy];
166 src = fetchurl {
166 src = fetchurl {
167 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
167 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
168 md5 = "4c72d78dcb6bb993f30536842c16af4d";
168 md5 = "4c72d78dcb6bb993f30536842c16af4d";
169 };
169 };
170 meta = {
170 meta = {
171 license = [ pkgs.lib.licenses.mit ];
171 license = [ pkgs.lib.licenses.mit ];
172 };
172 };
173 };
173 };
174 Pygments = super.buildPythonPackage {
174 Pygments = super.buildPythonPackage {
175 name = "Pygments-2.2.0";
175 name = "Pygments-2.2.0";
176 buildInputs = with self; [];
176 buildInputs = with self; [];
177 doCheck = false;
177 doCheck = false;
178 propagatedBuildInputs = with self; [];
178 propagatedBuildInputs = with self; [];
179 src = fetchurl {
179 src = fetchurl {
180 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
180 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
181 md5 = "13037baca42f16917cbd5ad2fab50844";
181 md5 = "13037baca42f16917cbd5ad2fab50844";
182 };
182 };
183 meta = {
183 meta = {
184 license = [ pkgs.lib.licenses.bsdOriginal ];
184 license = [ pkgs.lib.licenses.bsdOriginal ];
185 };
185 };
186 };
186 };
187 Pylons = super.buildPythonPackage {
187 Pylons = super.buildPythonPackage {
188 name = "Pylons-1.0.2.dev20170630";
188 name = "Pylons-1.0.2.dev20170630";
189 buildInputs = with self; [];
189 buildInputs = with self; [];
190 doCheck = false;
190 doCheck = false;
191 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
191 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
192 src = fetchurl {
192 src = fetchurl {
193 url = "https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f";
193 url = "https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f";
194 md5 = "f26633726fa2cd3a340316ee6a5d218f";
194 md5 = "f26633726fa2cd3a340316ee6a5d218f";
195 };
195 };
196 meta = {
196 meta = {
197 license = [ pkgs.lib.licenses.bsdOriginal ];
197 license = [ pkgs.lib.licenses.bsdOriginal ];
198 };
198 };
199 };
199 };
200 Routes = super.buildPythonPackage {
200 Routes = super.buildPythonPackage {
201 name = "Routes-1.13";
201 name = "Routes-1.13";
202 buildInputs = with self; [];
202 buildInputs = with self; [];
203 doCheck = false;
203 doCheck = false;
204 propagatedBuildInputs = with self; [repoze.lru];
204 propagatedBuildInputs = with self; [repoze.lru];
205 src = fetchurl {
205 src = fetchurl {
206 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
206 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
207 md5 = "d527b0ab7dd9172b1275a41f97448783";
207 md5 = "d527b0ab7dd9172b1275a41f97448783";
208 };
208 };
209 meta = {
209 meta = {
210 license = [ pkgs.lib.licenses.bsdOriginal ];
210 license = [ pkgs.lib.licenses.bsdOriginal ];
211 };
211 };
212 };
212 };
213 SQLAlchemy = super.buildPythonPackage {
213 SQLAlchemy = super.buildPythonPackage {
214 name = "SQLAlchemy-0.9.9";
214 name = "SQLAlchemy-0.9.9";
215 buildInputs = with self; [];
215 buildInputs = with self; [];
216 doCheck = false;
216 doCheck = false;
217 propagatedBuildInputs = with self; [];
217 propagatedBuildInputs = with self; [];
218 src = fetchurl {
218 src = fetchurl {
219 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
219 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
220 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
220 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
221 };
221 };
222 meta = {
222 meta = {
223 license = [ pkgs.lib.licenses.mit ];
223 license = [ pkgs.lib.licenses.mit ];
224 };
224 };
225 };
225 };
226 Sphinx = super.buildPythonPackage {
226 Sphinx = super.buildPythonPackage {
227 name = "Sphinx-1.2.2";
227 name = "Sphinx-1.2.2";
228 buildInputs = with self; [];
228 buildInputs = with self; [];
229 doCheck = false;
229 doCheck = false;
230 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
230 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
231 src = fetchurl {
231 src = fetchurl {
232 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
232 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
233 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
233 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
234 };
234 };
235 meta = {
235 meta = {
236 license = [ pkgs.lib.licenses.bsdOriginal ];
236 license = [ pkgs.lib.licenses.bsdOriginal ];
237 };
237 };
238 };
238 };
239 Tempita = super.buildPythonPackage {
239 Tempita = super.buildPythonPackage {
240 name = "Tempita-0.5.2";
240 name = "Tempita-0.5.2";
241 buildInputs = with self; [];
241 buildInputs = with self; [];
242 doCheck = false;
242 doCheck = false;
243 propagatedBuildInputs = with self; [];
243 propagatedBuildInputs = with self; [];
244 src = fetchurl {
244 src = fetchurl {
245 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
245 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
246 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
246 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
247 };
247 };
248 meta = {
248 meta = {
249 license = [ pkgs.lib.licenses.mit ];
249 license = [ pkgs.lib.licenses.mit ];
250 };
250 };
251 };
251 };
252 URLObject = super.buildPythonPackage {
252 URLObject = super.buildPythonPackage {
253 name = "URLObject-2.4.0";
253 name = "URLObject-2.4.0";
254 buildInputs = with self; [];
254 buildInputs = with self; [];
255 doCheck = false;
255 doCheck = false;
256 propagatedBuildInputs = with self; [];
256 propagatedBuildInputs = with self; [];
257 src = fetchurl {
257 src = fetchurl {
258 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
258 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
259 md5 = "2ed819738a9f0a3051f31dc9924e3065";
259 md5 = "2ed819738a9f0a3051f31dc9924e3065";
260 };
260 };
261 meta = {
261 meta = {
262 license = [ ];
262 license = [ ];
263 };
263 };
264 };
264 };
265 WebError = super.buildPythonPackage {
265 WebError = super.buildPythonPackage {
266 name = "WebError-0.10.3";
266 name = "WebError-0.10.3";
267 buildInputs = with self; [];
267 buildInputs = with self; [];
268 doCheck = false;
268 doCheck = false;
269 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
269 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
270 src = fetchurl {
270 src = fetchurl {
271 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
271 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
272 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
272 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
273 };
273 };
274 meta = {
274 meta = {
275 license = [ pkgs.lib.licenses.mit ];
275 license = [ pkgs.lib.licenses.mit ];
276 };
276 };
277 };
277 };
278 WebHelpers = super.buildPythonPackage {
278 WebHelpers = super.buildPythonPackage {
279 name = "WebHelpers-1.3";
279 name = "WebHelpers-1.3";
280 buildInputs = with self; [];
280 buildInputs = with self; [];
281 doCheck = false;
281 doCheck = false;
282 propagatedBuildInputs = with self; [MarkupSafe];
282 propagatedBuildInputs = with self; [MarkupSafe];
283 src = fetchurl {
283 src = fetchurl {
284 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
284 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
285 md5 = "32749ffadfc40fea51075a7def32588b";
285 md5 = "32749ffadfc40fea51075a7def32588b";
286 };
286 };
287 meta = {
287 meta = {
288 license = [ pkgs.lib.licenses.bsdOriginal ];
288 license = [ pkgs.lib.licenses.bsdOriginal ];
289 };
289 };
290 };
290 };
291 WebHelpers2 = super.buildPythonPackage {
291 WebHelpers2 = super.buildPythonPackage {
292 name = "WebHelpers2-2.0";
292 name = "WebHelpers2-2.0";
293 buildInputs = with self; [];
293 buildInputs = with self; [];
294 doCheck = false;
294 doCheck = false;
295 propagatedBuildInputs = with self; [MarkupSafe six];
295 propagatedBuildInputs = with self; [MarkupSafe six];
296 src = fetchurl {
296 src = fetchurl {
297 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
297 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
298 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
298 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
299 };
299 };
300 meta = {
300 meta = {
301 license = [ pkgs.lib.licenses.mit ];
301 license = [ pkgs.lib.licenses.mit ];
302 };
302 };
303 };
303 };
304 WebOb = super.buildPythonPackage {
304 WebOb = super.buildPythonPackage {
305 name = "WebOb-1.3.1";
305 name = "WebOb-1.7.3";
306 buildInputs = with self; [];
306 buildInputs = with self; [];
307 doCheck = false;
307 doCheck = false;
308 propagatedBuildInputs = with self; [];
308 propagatedBuildInputs = with self; [];
309 src = fetchurl {
309 src = fetchurl {
310 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
310 url = "https://pypi.python.org/packages/46/87/2f96d8d43b2078fae6e1d33fa86b95c228cebed060f4e3c7576cc44ea83b/WebOb-1.7.3.tar.gz";
311 md5 = "20918251c5726956ba8fef22d1556177";
311 md5 = "350028baffc508e3d23c078118e35316";
312 };
312 };
313 meta = {
313 meta = {
314 license = [ pkgs.lib.licenses.mit ];
314 license = [ pkgs.lib.licenses.mit ];
315 };
315 };
316 };
316 };
317 WebTest = super.buildPythonPackage {
317 WebTest = super.buildPythonPackage {
318 name = "WebTest-1.4.3";
318 name = "WebTest-2.0.27";
319 buildInputs = with self; [];
319 buildInputs = with self; [];
320 doCheck = false;
320 doCheck = false;
321 propagatedBuildInputs = with self; [WebOb];
321 propagatedBuildInputs = with self; [six WebOb waitress beautifulsoup4];
322 src = fetchurl {
322 src = fetchurl {
323 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
323 url = "https://pypi.python.org/packages/80/fa/ca3a759985c72e3a124cbca3e1f8a2e931a07ffd31fd45d8f7bf21cb95cf/WebTest-2.0.27.tar.gz";
324 md5 = "631ce728bed92c681a4020a36adbc353";
324 md5 = "54e6515ac71c51b6fc90179483c749ad";
325 };
325 };
326 meta = {
326 meta = {
327 license = [ pkgs.lib.licenses.mit ];
327 license = [ pkgs.lib.licenses.mit ];
328 };
328 };
329 };
329 };
330 Whoosh = super.buildPythonPackage {
330 Whoosh = super.buildPythonPackage {
331 name = "Whoosh-2.7.4";
331 name = "Whoosh-2.7.4";
332 buildInputs = with self; [];
332 buildInputs = with self; [];
333 doCheck = false;
333 doCheck = false;
334 propagatedBuildInputs = with self; [];
334 propagatedBuildInputs = with self; [];
335 src = fetchurl {
335 src = fetchurl {
336 url = "https://pypi.python.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
336 url = "https://pypi.python.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
337 md5 = "c2710105f20b3e29936bd2357383c325";
337 md5 = "c2710105f20b3e29936bd2357383c325";
338 };
338 };
339 meta = {
339 meta = {
340 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
340 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
341 };
341 };
342 };
342 };
343 alembic = super.buildPythonPackage {
343 alembic = super.buildPythonPackage {
344 name = "alembic-0.9.2";
344 name = "alembic-0.9.2";
345 buildInputs = with self; [];
345 buildInputs = with self; [];
346 doCheck = false;
346 doCheck = false;
347 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor python-dateutil];
347 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor python-dateutil];
348 src = fetchurl {
348 src = fetchurl {
349 url = "https://pypi.python.org/packages/78/48/b5b26e7218b415f40b60b92c53853d242e5456c0f19f6c66101d98ff5f2a/alembic-0.9.2.tar.gz";
349 url = "https://pypi.python.org/packages/78/48/b5b26e7218b415f40b60b92c53853d242e5456c0f19f6c66101d98ff5f2a/alembic-0.9.2.tar.gz";
350 md5 = "40daf8bae50969beea40efaaf0839ff4";
350 md5 = "40daf8bae50969beea40efaaf0839ff4";
351 };
351 };
352 meta = {
352 meta = {
353 license = [ pkgs.lib.licenses.mit ];
353 license = [ pkgs.lib.licenses.mit ];
354 };
354 };
355 };
355 };
356 amqplib = super.buildPythonPackage {
356 amqplib = super.buildPythonPackage {
357 name = "amqplib-1.0.2";
357 name = "amqplib-1.0.2";
358 buildInputs = with self; [];
358 buildInputs = with self; [];
359 doCheck = false;
359 doCheck = false;
360 propagatedBuildInputs = with self; [];
360 propagatedBuildInputs = with self; [];
361 src = fetchurl {
361 src = fetchurl {
362 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
362 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
363 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
363 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
364 };
364 };
365 meta = {
365 meta = {
366 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
366 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
367 };
367 };
368 };
368 };
369 anyjson = super.buildPythonPackage {
369 anyjson = super.buildPythonPackage {
370 name = "anyjson-0.3.3";
370 name = "anyjson-0.3.3";
371 buildInputs = with self; [];
371 buildInputs = with self; [];
372 doCheck = false;
372 doCheck = false;
373 propagatedBuildInputs = with self; [];
373 propagatedBuildInputs = with self; [];
374 src = fetchurl {
374 src = fetchurl {
375 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
375 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
376 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
376 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
377 };
377 };
378 meta = {
378 meta = {
379 license = [ pkgs.lib.licenses.bsdOriginal ];
379 license = [ pkgs.lib.licenses.bsdOriginal ];
380 };
380 };
381 };
381 };
382 appenlight-client = super.buildPythonPackage {
382 appenlight-client = super.buildPythonPackage {
383 name = "appenlight-client-0.6.21";
383 name = "appenlight-client-0.6.21";
384 buildInputs = with self; [];
384 buildInputs = with self; [];
385 doCheck = false;
385 doCheck = false;
386 propagatedBuildInputs = with self; [WebOb requests six];
386 propagatedBuildInputs = with self; [WebOb requests six];
387 src = fetchurl {
387 src = fetchurl {
388 url = "https://pypi.python.org/packages/c9/23/91b66cfa0b963662c10b2a06ccaadf3f3a4848a7a2aa16255cb43d5160ec/appenlight_client-0.6.21.tar.gz";
388 url = "https://pypi.python.org/packages/c9/23/91b66cfa0b963662c10b2a06ccaadf3f3a4848a7a2aa16255cb43d5160ec/appenlight_client-0.6.21.tar.gz";
389 md5 = "273999ac854fdaefa8d0fb61965a4ed9";
389 md5 = "273999ac854fdaefa8d0fb61965a4ed9";
390 };
390 };
391 meta = {
391 meta = {
392 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
392 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
393 };
393 };
394 };
394 };
395 authomatic = super.buildPythonPackage {
395 authomatic = super.buildPythonPackage {
396 name = "authomatic-0.1.0.post1";
396 name = "authomatic-0.1.0.post1";
397 buildInputs = with self; [];
397 buildInputs = with self; [];
398 doCheck = false;
398 doCheck = false;
399 propagatedBuildInputs = with self; [];
399 propagatedBuildInputs = with self; [];
400 src = fetchurl {
400 src = fetchurl {
401 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
401 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
402 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
402 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
403 };
403 };
404 meta = {
404 meta = {
405 license = [ pkgs.lib.licenses.mit ];
405 license = [ pkgs.lib.licenses.mit ];
406 };
406 };
407 };
407 };
408 backport-ipaddress = super.buildPythonPackage {
408 backport-ipaddress = super.buildPythonPackage {
409 name = "backport-ipaddress-0.1";
409 name = "backport-ipaddress-0.1";
410 buildInputs = with self; [];
410 buildInputs = with self; [];
411 doCheck = false;
411 doCheck = false;
412 propagatedBuildInputs = with self; [];
412 propagatedBuildInputs = with self; [];
413 src = fetchurl {
413 src = fetchurl {
414 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
414 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
415 md5 = "9c1f45f4361f71b124d7293a60006c05";
415 md5 = "9c1f45f4361f71b124d7293a60006c05";
416 };
416 };
417 meta = {
417 meta = {
418 license = [ pkgs.lib.licenses.psfl ];
418 license = [ pkgs.lib.licenses.psfl ];
419 };
419 };
420 };
420 };
421 backports.shutil-get-terminal-size = super.buildPythonPackage {
421 backports.shutil-get-terminal-size = super.buildPythonPackage {
422 name = "backports.shutil-get-terminal-size-1.0.0";
422 name = "backports.shutil-get-terminal-size-1.0.0";
423 buildInputs = with self; [];
423 buildInputs = with self; [];
424 doCheck = false;
424 doCheck = false;
425 propagatedBuildInputs = with self; [];
425 propagatedBuildInputs = with self; [];
426 src = fetchurl {
426 src = fetchurl {
427 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
427 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
428 md5 = "03267762480bd86b50580dc19dff3c66";
428 md5 = "03267762480bd86b50580dc19dff3c66";
429 };
429 };
430 meta = {
430 meta = {
431 license = [ pkgs.lib.licenses.mit ];
431 license = [ pkgs.lib.licenses.mit ];
432 };
432 };
433 };
433 };
434 beautifulsoup4 = super.buildPythonPackage {
435 name = "beautifulsoup4-4.6.0";
436 buildInputs = with self; [];
437 doCheck = false;
438 propagatedBuildInputs = with self; [];
439 src = fetchurl {
440 url = "https://pypi.python.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
441 md5 = "c17714d0f91a23b708a592cb3c697728";
442 };
443 meta = {
444 license = [ pkgs.lib.licenses.mit ];
445 };
446 };
434 bleach = super.buildPythonPackage {
447 bleach = super.buildPythonPackage {
435 name = "bleach-1.5.0";
448 name = "bleach-1.5.0";
436 buildInputs = with self; [];
449 buildInputs = with self; [];
437 doCheck = false;
450 doCheck = false;
438 propagatedBuildInputs = with self; [six html5lib];
451 propagatedBuildInputs = with self; [six html5lib];
439 src = fetchurl {
452 src = fetchurl {
440 url = "https://pypi.python.org/packages/99/00/25a8fce4de102bf6e3cc76bc4ea60685b2fee33bde1b34830c70cacc26a7/bleach-1.5.0.tar.gz";
453 url = "https://pypi.python.org/packages/99/00/25a8fce4de102bf6e3cc76bc4ea60685b2fee33bde1b34830c70cacc26a7/bleach-1.5.0.tar.gz";
441 md5 = "b663300efdf421b3b727b19d7be9c7e7";
454 md5 = "b663300efdf421b3b727b19d7be9c7e7";
442 };
455 };
443 meta = {
456 meta = {
444 license = [ pkgs.lib.licenses.asl20 ];
457 license = [ pkgs.lib.licenses.asl20 ];
445 };
458 };
446 };
459 };
447 bottle = super.buildPythonPackage {
460 bottle = super.buildPythonPackage {
448 name = "bottle-0.12.8";
461 name = "bottle-0.12.8";
449 buildInputs = with self; [];
462 buildInputs = with self; [];
450 doCheck = false;
463 doCheck = false;
451 propagatedBuildInputs = with self; [];
464 propagatedBuildInputs = with self; [];
452 src = fetchurl {
465 src = fetchurl {
453 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
466 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
454 md5 = "13132c0a8f607bf860810a6ee9064c5b";
467 md5 = "13132c0a8f607bf860810a6ee9064c5b";
455 };
468 };
456 meta = {
469 meta = {
457 license = [ pkgs.lib.licenses.mit ];
470 license = [ pkgs.lib.licenses.mit ];
458 };
471 };
459 };
472 };
460 bumpversion = super.buildPythonPackage {
473 bumpversion = super.buildPythonPackage {
461 name = "bumpversion-0.5.3";
474 name = "bumpversion-0.5.3";
462 buildInputs = with self; [];
475 buildInputs = with self; [];
463 doCheck = false;
476 doCheck = false;
464 propagatedBuildInputs = with self; [];
477 propagatedBuildInputs = with self; [];
465 src = fetchurl {
478 src = fetchurl {
466 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
479 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
467 md5 = "c66a3492eafcf5ad4b024be9fca29820";
480 md5 = "c66a3492eafcf5ad4b024be9fca29820";
468 };
481 };
469 meta = {
482 meta = {
470 license = [ pkgs.lib.licenses.mit ];
483 license = [ pkgs.lib.licenses.mit ];
471 };
484 };
472 };
485 };
473 celery = super.buildPythonPackage {
486 celery = super.buildPythonPackage {
474 name = "celery-2.2.10";
487 name = "celery-2.2.10";
475 buildInputs = with self; [];
488 buildInputs = with self; [];
476 doCheck = false;
489 doCheck = false;
477 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
490 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
478 src = fetchurl {
491 src = fetchurl {
479 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
492 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
480 md5 = "898bc87e54f278055b561316ba73e222";
493 md5 = "898bc87e54f278055b561316ba73e222";
481 };
494 };
482 meta = {
495 meta = {
483 license = [ pkgs.lib.licenses.bsdOriginal ];
496 license = [ pkgs.lib.licenses.bsdOriginal ];
484 };
497 };
485 };
498 };
486 channelstream = super.buildPythonPackage {
499 channelstream = super.buildPythonPackage {
487 name = "channelstream-0.5.2";
500 name = "channelstream-0.5.2";
488 buildInputs = with self; [];
501 buildInputs = with self; [];
489 doCheck = false;
502 doCheck = false;
490 propagatedBuildInputs = with self; [gevent ws4py pyramid pyramid-jinja2 itsdangerous requests six];
503 propagatedBuildInputs = with self; [gevent ws4py pyramid pyramid-jinja2 itsdangerous requests six];
491 src = fetchurl {
504 src = fetchurl {
492 url = "https://pypi.python.org/packages/2b/31/29a8e085cf5bf97fa88e7b947adabfc581a18a3463adf77fb6dada34a65f/channelstream-0.5.2.tar.gz";
505 url = "https://pypi.python.org/packages/2b/31/29a8e085cf5bf97fa88e7b947adabfc581a18a3463adf77fb6dada34a65f/channelstream-0.5.2.tar.gz";
493 md5 = "1c5eb2a8a405be6f1073da94da6d81d3";
506 md5 = "1c5eb2a8a405be6f1073da94da6d81d3";
494 };
507 };
495 meta = {
508 meta = {
496 license = [ pkgs.lib.licenses.bsdOriginal ];
509 license = [ pkgs.lib.licenses.bsdOriginal ];
497 };
510 };
498 };
511 };
499 click = super.buildPythonPackage {
512 click = super.buildPythonPackage {
500 name = "click-5.1";
513 name = "click-5.1";
501 buildInputs = with self; [];
514 buildInputs = with self; [];
502 doCheck = false;
515 doCheck = false;
503 propagatedBuildInputs = with self; [];
516 propagatedBuildInputs = with self; [];
504 src = fetchurl {
517 src = fetchurl {
505 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
518 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
506 md5 = "9c5323008cccfe232a8b161fc8196d41";
519 md5 = "9c5323008cccfe232a8b161fc8196d41";
507 };
520 };
508 meta = {
521 meta = {
509 license = [ pkgs.lib.licenses.bsdOriginal ];
522 license = [ pkgs.lib.licenses.bsdOriginal ];
510 };
523 };
511 };
524 };
512 colander = super.buildPythonPackage {
525 colander = super.buildPythonPackage {
513 name = "colander-1.3.3";
526 name = "colander-1.3.3";
514 buildInputs = with self; [];
527 buildInputs = with self; [];
515 doCheck = false;
528 doCheck = false;
516 propagatedBuildInputs = with self; [translationstring iso8601];
529 propagatedBuildInputs = with self; [translationstring iso8601];
517 src = fetchurl {
530 src = fetchurl {
518 url = "https://pypi.python.org/packages/54/a9/9862a561e015b2c7b56404c0b13828a8bdc51e05ab3703bd792cec064487/colander-1.3.3.tar.gz";
531 url = "https://pypi.python.org/packages/54/a9/9862a561e015b2c7b56404c0b13828a8bdc51e05ab3703bd792cec064487/colander-1.3.3.tar.gz";
519 md5 = "f5d783768c51d73695f49bbe95778ab4";
532 md5 = "f5d783768c51d73695f49bbe95778ab4";
520 };
533 };
521 meta = {
534 meta = {
522 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
535 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
523 };
536 };
524 };
537 };
525 configobj = super.buildPythonPackage {
538 configobj = super.buildPythonPackage {
526 name = "configobj-5.0.6";
539 name = "configobj-5.0.6";
527 buildInputs = with self; [];
540 buildInputs = with self; [];
528 doCheck = false;
541 doCheck = false;
529 propagatedBuildInputs = with self; [six];
542 propagatedBuildInputs = with self; [six];
530 src = fetchurl {
543 src = fetchurl {
531 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
544 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
532 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
545 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
533 };
546 };
534 meta = {
547 meta = {
535 license = [ pkgs.lib.licenses.bsdOriginal ];
548 license = [ pkgs.lib.licenses.bsdOriginal ];
536 };
549 };
537 };
550 };
538 configparser = super.buildPythonPackage {
551 configparser = super.buildPythonPackage {
539 name = "configparser-3.5.0";
552 name = "configparser-3.5.0";
540 buildInputs = with self; [];
553 buildInputs = with self; [];
541 doCheck = false;
554 doCheck = false;
542 propagatedBuildInputs = with self; [];
555 propagatedBuildInputs = with self; [];
543 src = fetchurl {
556 src = fetchurl {
544 url = "https://pypi.python.org/packages/7c/69/c2ce7e91c89dc073eb1aa74c0621c3eefbffe8216b3f9af9d3885265c01c/configparser-3.5.0.tar.gz";
557 url = "https://pypi.python.org/packages/7c/69/c2ce7e91c89dc073eb1aa74c0621c3eefbffe8216b3f9af9d3885265c01c/configparser-3.5.0.tar.gz";
545 md5 = "cfdd915a5b7a6c09917a64a573140538";
558 md5 = "cfdd915a5b7a6c09917a64a573140538";
546 };
559 };
547 meta = {
560 meta = {
548 license = [ pkgs.lib.licenses.mit ];
561 license = [ pkgs.lib.licenses.mit ];
549 };
562 };
550 };
563 };
551 cov-core = super.buildPythonPackage {
564 cov-core = super.buildPythonPackage {
552 name = "cov-core-1.15.0";
565 name = "cov-core-1.15.0";
553 buildInputs = with self; [];
566 buildInputs = with self; [];
554 doCheck = false;
567 doCheck = false;
555 propagatedBuildInputs = with self; [coverage];
568 propagatedBuildInputs = with self; [coverage];
556 src = fetchurl {
569 src = fetchurl {
557 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
570 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
558 md5 = "f519d4cb4c4e52856afb14af52919fe6";
571 md5 = "f519d4cb4c4e52856afb14af52919fe6";
559 };
572 };
560 meta = {
573 meta = {
561 license = [ pkgs.lib.licenses.mit ];
574 license = [ pkgs.lib.licenses.mit ];
562 };
575 };
563 };
576 };
564 coverage = super.buildPythonPackage {
577 coverage = super.buildPythonPackage {
565 name = "coverage-3.7.1";
578 name = "coverage-3.7.1";
566 buildInputs = with self; [];
579 buildInputs = with self; [];
567 doCheck = false;
580 doCheck = false;
568 propagatedBuildInputs = with self; [];
581 propagatedBuildInputs = with self; [];
569 src = fetchurl {
582 src = fetchurl {
570 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
583 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
571 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
584 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
572 };
585 };
573 meta = {
586 meta = {
574 license = [ pkgs.lib.licenses.bsdOriginal ];
587 license = [ pkgs.lib.licenses.bsdOriginal ];
575 };
588 };
576 };
589 };
577 cssselect = super.buildPythonPackage {
590 cssselect = super.buildPythonPackage {
578 name = "cssselect-1.0.1";
591 name = "cssselect-1.0.1";
579 buildInputs = with self; [];
592 buildInputs = with self; [];
580 doCheck = false;
593 doCheck = false;
581 propagatedBuildInputs = with self; [];
594 propagatedBuildInputs = with self; [];
582 src = fetchurl {
595 src = fetchurl {
583 url = "https://pypi.python.org/packages/77/ff/9c865275cd19290feba56344eba570e719efb7ca5b34d67ed12b22ebbb0d/cssselect-1.0.1.tar.gz";
596 url = "https://pypi.python.org/packages/77/ff/9c865275cd19290feba56344eba570e719efb7ca5b34d67ed12b22ebbb0d/cssselect-1.0.1.tar.gz";
584 md5 = "3fa03bf82a9f0b1223c0f1eb1369e139";
597 md5 = "3fa03bf82a9f0b1223c0f1eb1369e139";
585 };
598 };
586 meta = {
599 meta = {
587 license = [ pkgs.lib.licenses.bsdOriginal ];
600 license = [ pkgs.lib.licenses.bsdOriginal ];
588 };
601 };
589 };
602 };
590 decorator = super.buildPythonPackage {
603 decorator = super.buildPythonPackage {
591 name = "decorator-4.0.11";
604 name = "decorator-4.0.11";
592 buildInputs = with self; [];
605 buildInputs = with self; [];
593 doCheck = false;
606 doCheck = false;
594 propagatedBuildInputs = with self; [];
607 propagatedBuildInputs = with self; [];
595 src = fetchurl {
608 src = fetchurl {
596 url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz";
609 url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz";
597 md5 = "73644c8f0bd4983d1b6a34b49adec0ae";
610 md5 = "73644c8f0bd4983d1b6a34b49adec0ae";
598 };
611 };
599 meta = {
612 meta = {
600 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
613 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
601 };
614 };
602 };
615 };
603 deform = super.buildPythonPackage {
616 deform = super.buildPythonPackage {
604 name = "deform-2.0.4";
617 name = "deform-2.0.4";
605 buildInputs = with self; [];
618 buildInputs = with self; [];
606 doCheck = false;
619 doCheck = false;
607 propagatedBuildInputs = with self; [Chameleon colander iso8601 peppercorn translationstring zope.deprecation];
620 propagatedBuildInputs = with self; [Chameleon colander iso8601 peppercorn translationstring zope.deprecation];
608 src = fetchurl {
621 src = fetchurl {
609 url = "https://pypi.python.org/packages/66/3b/eefcb07abcab7a97f6665aa2d0cf1af741d9d6e78a2e4657fd2b89f89880/deform-2.0.4.tar.gz";
622 url = "https://pypi.python.org/packages/66/3b/eefcb07abcab7a97f6665aa2d0cf1af741d9d6e78a2e4657fd2b89f89880/deform-2.0.4.tar.gz";
610 md5 = "34756e42cf50dd4b4430809116c4ec0a";
623 md5 = "34756e42cf50dd4b4430809116c4ec0a";
611 };
624 };
612 meta = {
625 meta = {
613 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
626 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
614 };
627 };
615 };
628 };
616 docutils = super.buildPythonPackage {
629 docutils = super.buildPythonPackage {
617 name = "docutils-0.13.1";
630 name = "docutils-0.13.1";
618 buildInputs = with self; [];
631 buildInputs = with self; [];
619 doCheck = false;
632 doCheck = false;
620 propagatedBuildInputs = with self; [];
633 propagatedBuildInputs = with self; [];
621 src = fetchurl {
634 src = fetchurl {
622 url = "https://pypi.python.org/packages/05/25/7b5484aca5d46915493f1fd4ecb63c38c333bd32aa9ad6e19da8d08895ae/docutils-0.13.1.tar.gz";
635 url = "https://pypi.python.org/packages/05/25/7b5484aca5d46915493f1fd4ecb63c38c333bd32aa9ad6e19da8d08895ae/docutils-0.13.1.tar.gz";
623 md5 = "ea4a893c633c788be9b8078b6b305d53";
636 md5 = "ea4a893c633c788be9b8078b6b305d53";
624 };
637 };
625 meta = {
638 meta = {
626 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
639 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
627 };
640 };
628 };
641 };
629 dogpile.cache = super.buildPythonPackage {
642 dogpile.cache = super.buildPythonPackage {
630 name = "dogpile.cache-0.6.4";
643 name = "dogpile.cache-0.6.4";
631 buildInputs = with self; [];
644 buildInputs = with self; [];
632 doCheck = false;
645 doCheck = false;
633 propagatedBuildInputs = with self; [];
646 propagatedBuildInputs = with self; [];
634 src = fetchurl {
647 src = fetchurl {
635 url = "https://pypi.python.org/packages/b6/3d/35c05ca01c070bb70d9d422f2c4858ecb021b05b21af438fec5ccd7b945c/dogpile.cache-0.6.4.tar.gz";
648 url = "https://pypi.python.org/packages/b6/3d/35c05ca01c070bb70d9d422f2c4858ecb021b05b21af438fec5ccd7b945c/dogpile.cache-0.6.4.tar.gz";
636 md5 = "66e0a6cae6c08cb1ea25f89d0eadfeb0";
649 md5 = "66e0a6cae6c08cb1ea25f89d0eadfeb0";
637 };
650 };
638 meta = {
651 meta = {
639 license = [ pkgs.lib.licenses.bsdOriginal ];
652 license = [ pkgs.lib.licenses.bsdOriginal ];
640 };
653 };
641 };
654 };
642 dogpile.core = super.buildPythonPackage {
655 dogpile.core = super.buildPythonPackage {
643 name = "dogpile.core-0.4.1";
656 name = "dogpile.core-0.4.1";
644 buildInputs = with self; [];
657 buildInputs = with self; [];
645 doCheck = false;
658 doCheck = false;
646 propagatedBuildInputs = with self; [];
659 propagatedBuildInputs = with self; [];
647 src = fetchurl {
660 src = fetchurl {
648 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
661 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
649 md5 = "01cb19f52bba3e95c9b560f39341f045";
662 md5 = "01cb19f52bba3e95c9b560f39341f045";
650 };
663 };
651 meta = {
664 meta = {
652 license = [ pkgs.lib.licenses.bsdOriginal ];
665 license = [ pkgs.lib.licenses.bsdOriginal ];
653 };
666 };
654 };
667 };
655 ecdsa = super.buildPythonPackage {
668 ecdsa = super.buildPythonPackage {
656 name = "ecdsa-0.11";
669 name = "ecdsa-0.11";
657 buildInputs = with self; [];
670 buildInputs = with self; [];
658 doCheck = false;
671 doCheck = false;
659 propagatedBuildInputs = with self; [];
672 propagatedBuildInputs = with self; [];
660 src = fetchurl {
673 src = fetchurl {
661 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
674 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
662 md5 = "8ef586fe4dbb156697d756900cb41d7c";
675 md5 = "8ef586fe4dbb156697d756900cb41d7c";
663 };
676 };
664 meta = {
677 meta = {
665 license = [ pkgs.lib.licenses.mit ];
678 license = [ pkgs.lib.licenses.mit ];
666 };
679 };
667 };
680 };
668 elasticsearch = super.buildPythonPackage {
681 elasticsearch = super.buildPythonPackage {
669 name = "elasticsearch-2.3.0";
682 name = "elasticsearch-2.3.0";
670 buildInputs = with self; [];
683 buildInputs = with self; [];
671 doCheck = false;
684 doCheck = false;
672 propagatedBuildInputs = with self; [urllib3];
685 propagatedBuildInputs = with self; [urllib3];
673 src = fetchurl {
686 src = fetchurl {
674 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
687 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
675 md5 = "2550f3b51629cf1ef9636608af92c340";
688 md5 = "2550f3b51629cf1ef9636608af92c340";
676 };
689 };
677 meta = {
690 meta = {
678 license = [ pkgs.lib.licenses.asl20 ];
691 license = [ pkgs.lib.licenses.asl20 ];
679 };
692 };
680 };
693 };
681 elasticsearch-dsl = super.buildPythonPackage {
694 elasticsearch-dsl = super.buildPythonPackage {
682 name = "elasticsearch-dsl-2.2.0";
695 name = "elasticsearch-dsl-2.2.0";
683 buildInputs = with self; [];
696 buildInputs = with self; [];
684 doCheck = false;
697 doCheck = false;
685 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
698 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
686 src = fetchurl {
699 src = fetchurl {
687 url = "https://pypi.python.org/packages/66/2f/52a086968788e58461641570f45c3207a52d46ebbe9b77dc22b6a8ffda66/elasticsearch-dsl-2.2.0.tar.gz";
700 url = "https://pypi.python.org/packages/66/2f/52a086968788e58461641570f45c3207a52d46ebbe9b77dc22b6a8ffda66/elasticsearch-dsl-2.2.0.tar.gz";
688 md5 = "fa6bd3c87ea3caa8f0f051bc37c53221";
701 md5 = "fa6bd3c87ea3caa8f0f051bc37c53221";
689 };
702 };
690 meta = {
703 meta = {
691 license = [ pkgs.lib.licenses.asl20 ];
704 license = [ pkgs.lib.licenses.asl20 ];
692 };
705 };
693 };
706 };
694 entrypoints = super.buildPythonPackage {
707 entrypoints = super.buildPythonPackage {
695 name = "entrypoints-0.2.2";
708 name = "entrypoints-0.2.2";
696 buildInputs = with self; [];
709 buildInputs = with self; [];
697 doCheck = false;
710 doCheck = false;
698 propagatedBuildInputs = with self; [configparser];
711 propagatedBuildInputs = with self; [configparser];
699 src = fetchurl {
712 src = fetchurl {
700 url = "https://code.rhodecode.com/upstream/entrypoints/archive/96e6d645684e1af3d7df5b5272f3fe85a546b233.tar.gz?md5=7db37771aea9ac9fefe093e5d6987313";
713 url = "https://code.rhodecode.com/upstream/entrypoints/archive/96e6d645684e1af3d7df5b5272f3fe85a546b233.tar.gz?md5=7db37771aea9ac9fefe093e5d6987313";
701 md5 = "7db37771aea9ac9fefe093e5d6987313";
714 md5 = "7db37771aea9ac9fefe093e5d6987313";
702 };
715 };
703 meta = {
716 meta = {
704 license = [ pkgs.lib.licenses.mit ];
717 license = [ pkgs.lib.licenses.mit ];
705 };
718 };
706 };
719 };
707 enum34 = super.buildPythonPackage {
720 enum34 = super.buildPythonPackage {
708 name = "enum34-1.1.6";
721 name = "enum34-1.1.6";
709 buildInputs = with self; [];
722 buildInputs = with self; [];
710 doCheck = false;
723 doCheck = false;
711 propagatedBuildInputs = with self; [];
724 propagatedBuildInputs = with self; [];
712 src = fetchurl {
725 src = fetchurl {
713 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
726 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
714 md5 = "5f13a0841a61f7fc295c514490d120d0";
727 md5 = "5f13a0841a61f7fc295c514490d120d0";
715 };
728 };
716 meta = {
729 meta = {
717 license = [ pkgs.lib.licenses.bsdOriginal ];
730 license = [ pkgs.lib.licenses.bsdOriginal ];
718 };
731 };
719 };
732 };
720 funcsigs = super.buildPythonPackage {
733 funcsigs = super.buildPythonPackage {
721 name = "funcsigs-1.0.2";
734 name = "funcsigs-1.0.2";
722 buildInputs = with self; [];
735 buildInputs = with self; [];
723 doCheck = false;
736 doCheck = false;
724 propagatedBuildInputs = with self; [];
737 propagatedBuildInputs = with self; [];
725 src = fetchurl {
738 src = fetchurl {
726 url = "https://pypi.python.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
739 url = "https://pypi.python.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
727 md5 = "7e583285b1fb8a76305d6d68f4ccc14e";
740 md5 = "7e583285b1fb8a76305d6d68f4ccc14e";
728 };
741 };
729 meta = {
742 meta = {
730 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
743 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
731 };
744 };
732 };
745 };
733 functools32 = super.buildPythonPackage {
746 functools32 = super.buildPythonPackage {
734 name = "functools32-3.2.3.post2";
747 name = "functools32-3.2.3.post2";
735 buildInputs = with self; [];
748 buildInputs = with self; [];
736 doCheck = false;
749 doCheck = false;
737 propagatedBuildInputs = with self; [];
750 propagatedBuildInputs = with self; [];
738 src = fetchurl {
751 src = fetchurl {
739 url = "https://pypi.python.org/packages/5e/1a/0aa2c8195a204a9f51284018562dea77e25511f02fe924fac202fc012172/functools32-3.2.3-2.zip";
752 url = "https://pypi.python.org/packages/5e/1a/0aa2c8195a204a9f51284018562dea77e25511f02fe924fac202fc012172/functools32-3.2.3-2.zip";
740 md5 = "d55232eb132ec779e6893c902a0bc5ad";
753 md5 = "d55232eb132ec779e6893c902a0bc5ad";
741 };
754 };
742 meta = {
755 meta = {
743 license = [ pkgs.lib.licenses.psfl ];
756 license = [ pkgs.lib.licenses.psfl ];
744 };
757 };
745 };
758 };
746 future = super.buildPythonPackage {
759 future = super.buildPythonPackage {
747 name = "future-0.14.3";
760 name = "future-0.14.3";
748 buildInputs = with self; [];
761 buildInputs = with self; [];
749 doCheck = false;
762 doCheck = false;
750 propagatedBuildInputs = with self; [];
763 propagatedBuildInputs = with self; [];
751 src = fetchurl {
764 src = fetchurl {
752 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
765 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
753 md5 = "e94079b0bd1fc054929e8769fc0f6083";
766 md5 = "e94079b0bd1fc054929e8769fc0f6083";
754 };
767 };
755 meta = {
768 meta = {
756 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
769 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
757 };
770 };
758 };
771 };
759 futures = super.buildPythonPackage {
772 futures = super.buildPythonPackage {
760 name = "futures-3.0.2";
773 name = "futures-3.0.2";
761 buildInputs = with self; [];
774 buildInputs = with self; [];
762 doCheck = false;
775 doCheck = false;
763 propagatedBuildInputs = with self; [];
776 propagatedBuildInputs = with self; [];
764 src = fetchurl {
777 src = fetchurl {
765 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
778 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
766 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
779 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
767 };
780 };
768 meta = {
781 meta = {
769 license = [ pkgs.lib.licenses.bsdOriginal ];
782 license = [ pkgs.lib.licenses.bsdOriginal ];
770 };
783 };
771 };
784 };
772 gevent = super.buildPythonPackage {
785 gevent = super.buildPythonPackage {
773 name = "gevent-1.2.2";
786 name = "gevent-1.2.2";
774 buildInputs = with self; [];
787 buildInputs = with self; [];
775 doCheck = false;
788 doCheck = false;
776 propagatedBuildInputs = with self; [greenlet];
789 propagatedBuildInputs = with self; [greenlet];
777 src = fetchurl {
790 src = fetchurl {
778 url = "https://pypi.python.org/packages/1b/92/b111f76e54d2be11375b47b213b56687214f258fd9dae703546d30b837be/gevent-1.2.2.tar.gz";
791 url = "https://pypi.python.org/packages/1b/92/b111f76e54d2be11375b47b213b56687214f258fd9dae703546d30b837be/gevent-1.2.2.tar.gz";
779 md5 = "7f0baf355384fe5ff2ecf66853422554";
792 md5 = "7f0baf355384fe5ff2ecf66853422554";
780 };
793 };
781 meta = {
794 meta = {
782 license = [ pkgs.lib.licenses.mit ];
795 license = [ pkgs.lib.licenses.mit ];
783 };
796 };
784 };
797 };
785 gnureadline = super.buildPythonPackage {
798 gnureadline = super.buildPythonPackage {
786 name = "gnureadline-6.3.3";
799 name = "gnureadline-6.3.3";
787 buildInputs = with self; [];
800 buildInputs = with self; [];
788 doCheck = false;
801 doCheck = false;
789 propagatedBuildInputs = with self; [];
802 propagatedBuildInputs = with self; [];
790 src = fetchurl {
803 src = fetchurl {
791 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
804 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
792 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
805 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
793 };
806 };
794 meta = {
807 meta = {
795 license = [ pkgs.lib.licenses.gpl1 ];
808 license = [ pkgs.lib.licenses.gpl1 ];
796 };
809 };
797 };
810 };
798 gprof2dot = super.buildPythonPackage {
811 gprof2dot = super.buildPythonPackage {
799 name = "gprof2dot-2016.10.13";
812 name = "gprof2dot-2016.10.13";
800 buildInputs = with self; [];
813 buildInputs = with self; [];
801 doCheck = false;
814 doCheck = false;
802 propagatedBuildInputs = with self; [];
815 propagatedBuildInputs = with self; [];
803 src = fetchurl {
816 src = fetchurl {
804 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
817 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
805 md5 = "0125401f15fd2afe1df686a76c64a4fd";
818 md5 = "0125401f15fd2afe1df686a76c64a4fd";
806 };
819 };
807 meta = {
820 meta = {
808 license = [ { fullName = "LGPL"; } ];
821 license = [ { fullName = "LGPL"; } ];
809 };
822 };
810 };
823 };
811 graphviz = super.buildPythonPackage {
824 graphviz = super.buildPythonPackage {
812 name = "graphviz-0.7.1";
825 name = "graphviz-0.7.1";
813 buildInputs = with self; [];
826 buildInputs = with self; [];
814 doCheck = false;
827 doCheck = false;
815 propagatedBuildInputs = with self; [];
828 propagatedBuildInputs = with self; [];
816 src = fetchurl {
829 src = fetchurl {
817 url = "https://pypi.python.org/packages/7d/2d/f5cfa56467ca5a65eb44e1103d89d2f65dbc4f04cf7a1f3d38e973c3d1a8/graphviz-0.7.1.zip";
830 url = "https://pypi.python.org/packages/7d/2d/f5cfa56467ca5a65eb44e1103d89d2f65dbc4f04cf7a1f3d38e973c3d1a8/graphviz-0.7.1.zip";
818 md5 = "d5926e89975121d56dec777a79bfc9d1";
831 md5 = "d5926e89975121d56dec777a79bfc9d1";
819 };
832 };
820 meta = {
833 meta = {
821 license = [ pkgs.lib.licenses.mit ];
834 license = [ pkgs.lib.licenses.mit ];
822 };
835 };
823 };
836 };
824 greenlet = super.buildPythonPackage {
837 greenlet = super.buildPythonPackage {
825 name = "greenlet-0.4.12";
838 name = "greenlet-0.4.12";
826 buildInputs = with self; [];
839 buildInputs = with self; [];
827 doCheck = false;
840 doCheck = false;
828 propagatedBuildInputs = with self; [];
841 propagatedBuildInputs = with self; [];
829 src = fetchurl {
842 src = fetchurl {
830 url = "https://pypi.python.org/packages/be/76/82af375d98724054b7e273b5d9369346937324f9bcc20980b45b068ef0b0/greenlet-0.4.12.tar.gz";
843 url = "https://pypi.python.org/packages/be/76/82af375d98724054b7e273b5d9369346937324f9bcc20980b45b068ef0b0/greenlet-0.4.12.tar.gz";
831 md5 = "e8637647d58a26c4a1f51ca393e53c00";
844 md5 = "e8637647d58a26c4a1f51ca393e53c00";
832 };
845 };
833 meta = {
846 meta = {
834 license = [ pkgs.lib.licenses.mit ];
847 license = [ pkgs.lib.licenses.mit ];
835 };
848 };
836 };
849 };
837 gunicorn = super.buildPythonPackage {
850 gunicorn = super.buildPythonPackage {
838 name = "gunicorn-19.7.1";
851 name = "gunicorn-19.7.1";
839 buildInputs = with self; [];
852 buildInputs = with self; [];
840 doCheck = false;
853 doCheck = false;
841 propagatedBuildInputs = with self; [];
854 propagatedBuildInputs = with self; [];
842 src = fetchurl {
855 src = fetchurl {
843 url = "https://pypi.python.org/packages/30/3a/10bb213cede0cc4d13ac2263316c872a64bf4c819000c8ccd801f1d5f822/gunicorn-19.7.1.tar.gz";
856 url = "https://pypi.python.org/packages/30/3a/10bb213cede0cc4d13ac2263316c872a64bf4c819000c8ccd801f1d5f822/gunicorn-19.7.1.tar.gz";
844 md5 = "174d3c3cd670a5be0404d84c484e590c";
857 md5 = "174d3c3cd670a5be0404d84c484e590c";
845 };
858 };
846 meta = {
859 meta = {
847 license = [ pkgs.lib.licenses.mit ];
860 license = [ pkgs.lib.licenses.mit ];
848 };
861 };
849 };
862 };
850 html5lib = super.buildPythonPackage {
863 html5lib = super.buildPythonPackage {
851 name = "html5lib-0.9999999";
864 name = "html5lib-0.9999999";
852 buildInputs = with self; [];
865 buildInputs = with self; [];
853 doCheck = false;
866 doCheck = false;
854 propagatedBuildInputs = with self; [six];
867 propagatedBuildInputs = with self; [six];
855 src = fetchurl {
868 src = fetchurl {
856 url = "https://pypi.python.org/packages/ae/ae/bcb60402c60932b32dfaf19bb53870b29eda2cd17551ba5639219fb5ebf9/html5lib-0.9999999.tar.gz";
869 url = "https://pypi.python.org/packages/ae/ae/bcb60402c60932b32dfaf19bb53870b29eda2cd17551ba5639219fb5ebf9/html5lib-0.9999999.tar.gz";
857 md5 = "ef43cb05e9e799f25d65d1135838a96f";
870 md5 = "ef43cb05e9e799f25d65d1135838a96f";
858 };
871 };
859 meta = {
872 meta = {
860 license = [ pkgs.lib.licenses.mit ];
873 license = [ pkgs.lib.licenses.mit ];
861 };
874 };
862 };
875 };
863 infrae.cache = super.buildPythonPackage {
876 infrae.cache = super.buildPythonPackage {
864 name = "infrae.cache-1.0.1";
877 name = "infrae.cache-1.0.1";
865 buildInputs = with self; [];
878 buildInputs = with self; [];
866 doCheck = false;
879 doCheck = false;
867 propagatedBuildInputs = with self; [Beaker repoze.lru];
880 propagatedBuildInputs = with self; [Beaker repoze.lru];
868 src = fetchurl {
881 src = fetchurl {
869 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
882 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
870 md5 = "b09076a766747e6ed2a755cc62088e32";
883 md5 = "b09076a766747e6ed2a755cc62088e32";
871 };
884 };
872 meta = {
885 meta = {
873 license = [ pkgs.lib.licenses.zpt21 ];
886 license = [ pkgs.lib.licenses.zpt21 ];
874 };
887 };
875 };
888 };
876 invoke = super.buildPythonPackage {
889 invoke = super.buildPythonPackage {
877 name = "invoke-0.13.0";
890 name = "invoke-0.13.0";
878 buildInputs = with self; [];
891 buildInputs = with self; [];
879 doCheck = false;
892 doCheck = false;
880 propagatedBuildInputs = with self; [];
893 propagatedBuildInputs = with self; [];
881 src = fetchurl {
894 src = fetchurl {
882 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
895 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
883 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
896 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
884 };
897 };
885 meta = {
898 meta = {
886 license = [ pkgs.lib.licenses.bsdOriginal ];
899 license = [ pkgs.lib.licenses.bsdOriginal ];
887 };
900 };
888 };
901 };
889 ipdb = super.buildPythonPackage {
902 ipdb = super.buildPythonPackage {
890 name = "ipdb-0.10.3";
903 name = "ipdb-0.10.3";
891 buildInputs = with self; [];
904 buildInputs = with self; [];
892 doCheck = false;
905 doCheck = false;
893 propagatedBuildInputs = with self; [setuptools ipython];
906 propagatedBuildInputs = with self; [setuptools ipython];
894 src = fetchurl {
907 src = fetchurl {
895 url = "https://pypi.python.org/packages/ad/cc/0e7298e1fbf2efd52667c9354a12aa69fb6f796ce230cca03525051718ef/ipdb-0.10.3.tar.gz";
908 url = "https://pypi.python.org/packages/ad/cc/0e7298e1fbf2efd52667c9354a12aa69fb6f796ce230cca03525051718ef/ipdb-0.10.3.tar.gz";
896 md5 = "def1f6ac075d54bdee07e6501263d4fa";
909 md5 = "def1f6ac075d54bdee07e6501263d4fa";
897 };
910 };
898 meta = {
911 meta = {
899 license = [ pkgs.lib.licenses.bsdOriginal ];
912 license = [ pkgs.lib.licenses.bsdOriginal ];
900 };
913 };
901 };
914 };
902 ipython = super.buildPythonPackage {
915 ipython = super.buildPythonPackage {
903 name = "ipython-5.1.0";
916 name = "ipython-5.1.0";
904 buildInputs = with self; [];
917 buildInputs = with self; [];
905 doCheck = false;
918 doCheck = false;
906 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit Pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
919 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit Pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
907 src = fetchurl {
920 src = fetchurl {
908 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
921 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
909 md5 = "47c8122420f65b58784cb4b9b4af35e3";
922 md5 = "47c8122420f65b58784cb4b9b4af35e3";
910 };
923 };
911 meta = {
924 meta = {
912 license = [ pkgs.lib.licenses.bsdOriginal ];
925 license = [ pkgs.lib.licenses.bsdOriginal ];
913 };
926 };
914 };
927 };
915 ipython-genutils = super.buildPythonPackage {
928 ipython-genutils = super.buildPythonPackage {
916 name = "ipython-genutils-0.2.0";
929 name = "ipython-genutils-0.2.0";
917 buildInputs = with self; [];
930 buildInputs = with self; [];
918 doCheck = false;
931 doCheck = false;
919 propagatedBuildInputs = with self; [];
932 propagatedBuildInputs = with self; [];
920 src = fetchurl {
933 src = fetchurl {
921 url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
934 url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
922 md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
935 md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
923 };
936 };
924 meta = {
937 meta = {
925 license = [ pkgs.lib.licenses.bsdOriginal ];
938 license = [ pkgs.lib.licenses.bsdOriginal ];
926 };
939 };
927 };
940 };
928 iso8601 = super.buildPythonPackage {
941 iso8601 = super.buildPythonPackage {
929 name = "iso8601-0.1.11";
942 name = "iso8601-0.1.11";
930 buildInputs = with self; [];
943 buildInputs = with self; [];
931 doCheck = false;
944 doCheck = false;
932 propagatedBuildInputs = with self; [];
945 propagatedBuildInputs = with self; [];
933 src = fetchurl {
946 src = fetchurl {
934 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
947 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
935 md5 = "b06d11cd14a64096f907086044f0fe38";
948 md5 = "b06d11cd14a64096f907086044f0fe38";
936 };
949 };
937 meta = {
950 meta = {
938 license = [ pkgs.lib.licenses.mit ];
951 license = [ pkgs.lib.licenses.mit ];
939 };
952 };
940 };
953 };
941 itsdangerous = super.buildPythonPackage {
954 itsdangerous = super.buildPythonPackage {
942 name = "itsdangerous-0.24";
955 name = "itsdangerous-0.24";
943 buildInputs = with self; [];
956 buildInputs = with self; [];
944 doCheck = false;
957 doCheck = false;
945 propagatedBuildInputs = with self; [];
958 propagatedBuildInputs = with self; [];
946 src = fetchurl {
959 src = fetchurl {
947 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
960 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
948 md5 = "a3d55aa79369aef5345c036a8a26307f";
961 md5 = "a3d55aa79369aef5345c036a8a26307f";
949 };
962 };
950 meta = {
963 meta = {
951 license = [ pkgs.lib.licenses.bsdOriginal ];
964 license = [ pkgs.lib.licenses.bsdOriginal ];
952 };
965 };
953 };
966 };
954 jsonschema = super.buildPythonPackage {
967 jsonschema = super.buildPythonPackage {
955 name = "jsonschema-2.6.0";
968 name = "jsonschema-2.6.0";
956 buildInputs = with self; [];
969 buildInputs = with self; [];
957 doCheck = false;
970 doCheck = false;
958 propagatedBuildInputs = with self; [functools32];
971 propagatedBuildInputs = with self; [functools32];
959 src = fetchurl {
972 src = fetchurl {
960 url = "https://pypi.python.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
973 url = "https://pypi.python.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
961 md5 = "50c6b69a373a8b55ff1e0ec6e78f13f4";
974 md5 = "50c6b69a373a8b55ff1e0ec6e78f13f4";
962 };
975 };
963 meta = {
976 meta = {
964 license = [ pkgs.lib.licenses.mit ];
977 license = [ pkgs.lib.licenses.mit ];
965 };
978 };
966 };
979 };
967 jupyter-client = super.buildPythonPackage {
980 jupyter-client = super.buildPythonPackage {
968 name = "jupyter-client-5.0.0";
981 name = "jupyter-client-5.0.0";
969 buildInputs = with self; [];
982 buildInputs = with self; [];
970 doCheck = false;
983 doCheck = false;
971 propagatedBuildInputs = with self; [traitlets jupyter-core pyzmq python-dateutil];
984 propagatedBuildInputs = with self; [traitlets jupyter-core pyzmq python-dateutil];
972 src = fetchurl {
985 src = fetchurl {
973 url = "https://pypi.python.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
986 url = "https://pypi.python.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
974 md5 = "1acd331b5c9fb4d79dae9939e79f2426";
987 md5 = "1acd331b5c9fb4d79dae9939e79f2426";
975 };
988 };
976 meta = {
989 meta = {
977 license = [ pkgs.lib.licenses.bsdOriginal ];
990 license = [ pkgs.lib.licenses.bsdOriginal ];
978 };
991 };
979 };
992 };
980 jupyter-core = super.buildPythonPackage {
993 jupyter-core = super.buildPythonPackage {
981 name = "jupyter-core-4.3.0";
994 name = "jupyter-core-4.3.0";
982 buildInputs = with self; [];
995 buildInputs = with self; [];
983 doCheck = false;
996 doCheck = false;
984 propagatedBuildInputs = with self; [traitlets];
997 propagatedBuildInputs = with self; [traitlets];
985 src = fetchurl {
998 src = fetchurl {
986 url = "https://pypi.python.org/packages/2f/39/5138f975100ce14d150938df48a83cd852a3fd8e24b1244f4113848e69e2/jupyter_core-4.3.0.tar.gz";
999 url = "https://pypi.python.org/packages/2f/39/5138f975100ce14d150938df48a83cd852a3fd8e24b1244f4113848e69e2/jupyter_core-4.3.0.tar.gz";
987 md5 = "18819511a809afdeed9a995a9c27bcfb";
1000 md5 = "18819511a809afdeed9a995a9c27bcfb";
988 };
1001 };
989 meta = {
1002 meta = {
990 license = [ pkgs.lib.licenses.bsdOriginal ];
1003 license = [ pkgs.lib.licenses.bsdOriginal ];
991 };
1004 };
992 };
1005 };
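  # hupper: file-watching process reloader; the pyramid 1.9 entry further down lists it
  # among its propagated dependencies, which is why it appears in this generated set.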
1006 hupper = super.buildPythonPackage {
1007 name = "hupper-1.0";
1008 buildInputs = with self; [];
1009 doCheck = false;
1010 propagatedBuildInputs = with self; [];
1011 src = fetchurl {
1012 url = "https://pypi.python.org/packages/2e/07/df892c564dc09bb3cf6f6deb976c26adf9117db75ba218cb4353dbc9d826/hupper-1.0.tar.gz";
1013 md5 = "26e77da7d5ac5858f59af050d1a6eb5a";
1014 };
1015 meta = {
1016 license = [ pkgs.lib.licenses.mit ];
1017 };
1018 };
993 kombu = super.buildPythonPackage {
1019 kombu = super.buildPythonPackage {
994 name = "kombu-1.5.1";
1020 name = "kombu-1.5.1";
995 buildInputs = with self; [];
1021 buildInputs = with self; [];
996 doCheck = false;
1022 doCheck = false;
997 propagatedBuildInputs = with self; [anyjson amqplib];
1023 propagatedBuildInputs = with self; [anyjson amqplib];
998 src = fetchurl {
1024 src = fetchurl {
999 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
1025 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
1000 md5 = "50662f3c7e9395b3d0721fb75d100b63";
1026 md5 = "50662f3c7e9395b3d0721fb75d100b63";
1001 };
1027 };
1002 meta = {
1028 meta = {
1003 license = [ pkgs.lib.licenses.bsdOriginal ];
1029 license = [ pkgs.lib.licenses.bsdOriginal ];
1004 };
1030 };
1005 };
1031 };
1006 lxml = super.buildPythonPackage {
1032 lxml = super.buildPythonPackage {
1007 name = "lxml-3.7.3";
1033 name = "lxml-3.7.3";
1008 buildInputs = with self; [];
1034 buildInputs = with self; [];
1009 doCheck = false;
1035 doCheck = false;
1010 propagatedBuildInputs = with self; [];
1036 propagatedBuildInputs = with self; [];
1011 src = fetchurl {
1037 src = fetchurl {
1012 url = "https://pypi.python.org/packages/39/e8/a8e0b1fa65dd021d48fe21464f71783655f39a41f218293c1c590d54eb82/lxml-3.7.3.tar.gz";
1038 url = "https://pypi.python.org/packages/39/e8/a8e0b1fa65dd021d48fe21464f71783655f39a41f218293c1c590d54eb82/lxml-3.7.3.tar.gz";
1013 md5 = "075692ce442e69bbd604d44e21c02753";
1039 md5 = "075692ce442e69bbd604d44e21c02753";
1014 };
1040 };
1015 meta = {
1041 meta = {
1016 license = [ pkgs.lib.licenses.bsdOriginal ];
1042 license = [ pkgs.lib.licenses.bsdOriginal ];
1017 };
1043 };
1018 };
1044 };
1019 meld3 = super.buildPythonPackage {
1045 meld3 = super.buildPythonPackage {
1020 name = "meld3-1.0.2";
1046 name = "meld3-1.0.2";
1021 buildInputs = with self; [];
1047 buildInputs = with self; [];
1022 doCheck = false;
1048 doCheck = false;
1023 propagatedBuildInputs = with self; [];
1049 propagatedBuildInputs = with self; [];
1024 src = fetchurl {
1050 src = fetchurl {
1025 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
1051 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
1026 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
1052 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
1027 };
1053 };
1028 meta = {
1054 meta = {
1029 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1055 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1030 };
1056 };
1031 };
1057 };
1032 mistune = super.buildPythonPackage {
1058 mistune = super.buildPythonPackage {
1033 name = "mistune-0.7.4";
1059 name = "mistune-0.7.4";
1034 buildInputs = with self; [];
1060 buildInputs = with self; [];
1035 doCheck = false;
1061 doCheck = false;
1036 propagatedBuildInputs = with self; [];
1062 propagatedBuildInputs = with self; [];
1037 src = fetchurl {
1063 src = fetchurl {
1038 url = "https://pypi.python.org/packages/25/a4/12a584c0c59c9fed529f8b3c47ca8217c0cf8bcc5e1089d3256410cfbdbc/mistune-0.7.4.tar.gz";
1064 url = "https://pypi.python.org/packages/25/a4/12a584c0c59c9fed529f8b3c47ca8217c0cf8bcc5e1089d3256410cfbdbc/mistune-0.7.4.tar.gz";
1039 md5 = "92d01cb717e9e74429e9bde9d29ac43b";
1065 md5 = "92d01cb717e9e74429e9bde9d29ac43b";
1040 };
1066 };
1041 meta = {
1067 meta = {
1042 license = [ pkgs.lib.licenses.bsdOriginal ];
1068 license = [ pkgs.lib.licenses.bsdOriginal ];
1043 };
1069 };
1044 };
1070 };
1045 mock = super.buildPythonPackage {
1071 mock = super.buildPythonPackage {
1046 name = "mock-1.0.1";
1072 name = "mock-1.0.1";
1047 buildInputs = with self; [];
1073 buildInputs = with self; [];
1048 doCheck = false;
1074 doCheck = false;
1049 propagatedBuildInputs = with self; [];
1075 propagatedBuildInputs = with self; [];
1050 src = fetchurl {
1076 src = fetchurl {
1051 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
1077 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
1052 md5 = "869f08d003c289a97c1a6610faf5e913";
1078 md5 = "869f08d003c289a97c1a6610faf5e913";
1053 };
1079 };
1054 meta = {
1080 meta = {
1055 license = [ pkgs.lib.licenses.bsdOriginal ];
1081 license = [ pkgs.lib.licenses.bsdOriginal ];
1056 };
1082 };
1057 };
1083 };
1058 msgpack-python = super.buildPythonPackage {
1084 msgpack-python = super.buildPythonPackage {
1059 name = "msgpack-python-0.4.8";
1085 name = "msgpack-python-0.4.8";
1060 buildInputs = with self; [];
1086 buildInputs = with self; [];
1061 doCheck = false;
1087 doCheck = false;
1062 propagatedBuildInputs = with self; [];
1088 propagatedBuildInputs = with self; [];
1063 src = fetchurl {
1089 src = fetchurl {
1064 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
1090 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
1065 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
1091 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
1066 };
1092 };
1067 meta = {
1093 meta = {
1068 license = [ pkgs.lib.licenses.asl20 ];
1094 license = [ pkgs.lib.licenses.asl20 ];
1069 };
1095 };
1070 };
1096 };
1071 nbconvert = super.buildPythonPackage {
1097 nbconvert = super.buildPythonPackage {
1072 name = "nbconvert-5.1.1";
1098 name = "nbconvert-5.1.1";
1073 buildInputs = with self; [];
1099 buildInputs = with self; [];
1074 doCheck = false;
1100 doCheck = false;
1075 propagatedBuildInputs = with self; [mistune Jinja2 Pygments traitlets jupyter-core nbformat entrypoints bleach pandocfilters testpath];
1101 propagatedBuildInputs = with self; [mistune Jinja2 Pygments traitlets jupyter-core nbformat entrypoints bleach pandocfilters testpath];
1076 src = fetchurl {
1102 src = fetchurl {
1077 url = "https://pypi.python.org/packages/95/58/df1c91f1658ee5df19097f915a1e71c91fc824a708d82d2b2e35f8b80e9a/nbconvert-5.1.1.tar.gz";
1103 url = "https://pypi.python.org/packages/95/58/df1c91f1658ee5df19097f915a1e71c91fc824a708d82d2b2e35f8b80e9a/nbconvert-5.1.1.tar.gz";
1078 md5 = "d0263fb03a44db2f94eea09a608ed813";
1104 md5 = "d0263fb03a44db2f94eea09a608ed813";
1079 };
1105 };
1080 meta = {
1106 meta = {
1081 license = [ pkgs.lib.licenses.bsdOriginal ];
1107 license = [ pkgs.lib.licenses.bsdOriginal ];
1082 };
1108 };
1083 };
1109 };
1084 nbformat = super.buildPythonPackage {
1110 nbformat = super.buildPythonPackage {
1085 name = "nbformat-4.3.0";
1111 name = "nbformat-4.3.0";
1086 buildInputs = with self; [];
1112 buildInputs = with self; [];
1087 doCheck = false;
1113 doCheck = false;
1088 propagatedBuildInputs = with self; [ipython-genutils traitlets jsonschema jupyter-core];
1114 propagatedBuildInputs = with self; [ipython-genutils traitlets jsonschema jupyter-core];
1089 src = fetchurl {
1115 src = fetchurl {
1090 url = "https://pypi.python.org/packages/f9/c5/89df4abf906f766727f976e170caa85b4f1c1d1feb1f45d716016e68e19f/nbformat-4.3.0.tar.gz";
1116 url = "https://pypi.python.org/packages/f9/c5/89df4abf906f766727f976e170caa85b4f1c1d1feb1f45d716016e68e19f/nbformat-4.3.0.tar.gz";
1091 md5 = "9a00d20425914cd5ba5f97769d9963ca";
1117 md5 = "9a00d20425914cd5ba5f97769d9963ca";
1092 };
1118 };
1093 meta = {
1119 meta = {
1094 license = [ pkgs.lib.licenses.bsdOriginal ];
1120 license = [ pkgs.lib.licenses.bsdOriginal ];
1095 };
1121 };
1096 };
1122 };
1097 nose = super.buildPythonPackage {
1123 nose = super.buildPythonPackage {
1098 name = "nose-1.3.6";
1124 name = "nose-1.3.6";
1099 buildInputs = with self; [];
1125 buildInputs = with self; [];
1100 doCheck = false;
1126 doCheck = false;
1101 propagatedBuildInputs = with self; [];
1127 propagatedBuildInputs = with self; [];
1102 src = fetchurl {
1128 src = fetchurl {
1103 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
1129 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
1104 md5 = "0ca546d81ca8309080fc80cb389e7a16";
1130 md5 = "0ca546d81ca8309080fc80cb389e7a16";
1105 };
1131 };
1106 meta = {
1132 meta = {
1107 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
1133 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
1108 };
1134 };
1109 };
1135 };
1110 objgraph = super.buildPythonPackage {
1136 objgraph = super.buildPythonPackage {
1111 name = "objgraph-3.1.0";
1137 name = "objgraph-3.1.0";
1112 buildInputs = with self; [];
1138 buildInputs = with self; [];
1113 doCheck = false;
1139 doCheck = false;
1114 propagatedBuildInputs = with self; [graphviz];
1140 propagatedBuildInputs = with self; [graphviz];
1115 src = fetchurl {
1141 src = fetchurl {
1116 url = "https://pypi.python.org/packages/f4/b3/082e54e62094cb2ec84f8d5a49e0142cef99016491cecba83309cff920ae/objgraph-3.1.0.tar.gz";
1142 url = "https://pypi.python.org/packages/f4/b3/082e54e62094cb2ec84f8d5a49e0142cef99016491cecba83309cff920ae/objgraph-3.1.0.tar.gz";
1117 md5 = "eddbd96039796bfbd13eee403701e64a";
1143 md5 = "eddbd96039796bfbd13eee403701e64a";
1118 };
1144 };
1119 meta = {
1145 meta = {
1120 license = [ pkgs.lib.licenses.mit ];
1146 license = [ pkgs.lib.licenses.mit ];
1121 };
1147 };
1122 };
1148 };
1123 packaging = super.buildPythonPackage {
1149 packaging = super.buildPythonPackage {
1124 name = "packaging-15.2";
1150 name = "packaging-15.2";
1125 buildInputs = with self; [];
1151 buildInputs = with self; [];
1126 doCheck = false;
1152 doCheck = false;
1127 propagatedBuildInputs = with self; [];
1153 propagatedBuildInputs = with self; [];
1128 src = fetchurl {
1154 src = fetchurl {
1129 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
1155 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
1130 md5 = "c16093476f6ced42128bf610e5db3784";
1156 md5 = "c16093476f6ced42128bf610e5db3784";
1131 };
1157 };
1132 meta = {
1158 meta = {
1133 license = [ pkgs.lib.licenses.asl20 ];
1159 license = [ pkgs.lib.licenses.asl20 ];
1134 };
1160 };
1135 };
1161 };
1136 pandocfilters = super.buildPythonPackage {
1162 pandocfilters = super.buildPythonPackage {
1137 name = "pandocfilters-1.4.1";
1163 name = "pandocfilters-1.4.1";
1138 buildInputs = with self; [];
1164 buildInputs = with self; [];
1139 doCheck = false;
1165 doCheck = false;
1140 propagatedBuildInputs = with self; [];
1166 propagatedBuildInputs = with self; [];
1141 src = fetchurl {
1167 src = fetchurl {
1142 url = "https://pypi.python.org/packages/e3/1f/21d1b7e8ca571e80b796c758d361fdf5554335ff138158654684bc5401d8/pandocfilters-1.4.1.tar.gz";
1168 url = "https://pypi.python.org/packages/e3/1f/21d1b7e8ca571e80b796c758d361fdf5554335ff138158654684bc5401d8/pandocfilters-1.4.1.tar.gz";
1143 md5 = "7680d9f9ec07397dd17f380ee3818b9d";
1169 md5 = "7680d9f9ec07397dd17f380ee3818b9d";
1144 };
1170 };
1145 meta = {
1171 meta = {
1146 license = [ pkgs.lib.licenses.bsdOriginal ];
1172 license = [ pkgs.lib.licenses.bsdOriginal ];
1147 };
1173 };
1148 };
1174 };
1149 paramiko = super.buildPythonPackage {
1175 paramiko = super.buildPythonPackage {
1150 name = "paramiko-1.15.1";
1176 name = "paramiko-1.15.1";
1151 buildInputs = with self; [];
1177 buildInputs = with self; [];
1152 doCheck = false;
1178 doCheck = false;
1153 propagatedBuildInputs = with self; [pycrypto ecdsa];
1179 propagatedBuildInputs = with self; [pycrypto ecdsa];
1154 src = fetchurl {
1180 src = fetchurl {
1155 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
1181 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
1156 md5 = "48c274c3f9b1282932567b21f6acf3b5";
1182 md5 = "48c274c3f9b1282932567b21f6acf3b5";
1157 };
1183 };
1158 meta = {
1184 meta = {
1159 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1185 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1160 };
1186 };
1161 };
1187 };
1162 pathlib2 = super.buildPythonPackage {
1188 pathlib2 = super.buildPythonPackage {
1163 name = "pathlib2-2.3.0";
1189 name = "pathlib2-2.3.0";
1164 buildInputs = with self; [];
1190 buildInputs = with self; [];
1165 doCheck = false;
1191 doCheck = false;
1166 propagatedBuildInputs = with self; [six scandir];
1192 propagatedBuildInputs = with self; [six scandir];
1167 src = fetchurl {
1193 src = fetchurl {
1168 url = "https://pypi.python.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
1194 url = "https://pypi.python.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
1169 md5 = "89c90409d11fd5947966b6a30a47d18c";
1195 md5 = "89c90409d11fd5947966b6a30a47d18c";
1170 };
1196 };
1171 meta = {
1197 meta = {
1172 license = [ pkgs.lib.licenses.mit ];
1198 license = [ pkgs.lib.licenses.mit ];
1173 };
1199 };
1174 };
1200 };
1175 peppercorn = super.buildPythonPackage {
1201 peppercorn = super.buildPythonPackage {
1176 name = "peppercorn-0.5";
1202 name = "peppercorn-0.5";
1177 buildInputs = with self; [];
1203 buildInputs = with self; [];
1178 doCheck = false;
1204 doCheck = false;
1179 propagatedBuildInputs = with self; [];
1205 propagatedBuildInputs = with self; [];
1180 src = fetchurl {
1206 src = fetchurl {
1181 url = "https://pypi.python.org/packages/45/ec/a62ec317d1324a01567c5221b420742f094f05ee48097e5157d32be3755c/peppercorn-0.5.tar.gz";
1207 url = "https://pypi.python.org/packages/45/ec/a62ec317d1324a01567c5221b420742f094f05ee48097e5157d32be3755c/peppercorn-0.5.tar.gz";
1182 md5 = "f08efbca5790019ab45d76b7244abd40";
1208 md5 = "f08efbca5790019ab45d76b7244abd40";
1183 };
1209 };
1184 meta = {
1210 meta = {
1185 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1211 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1186 };
1212 };
1187 };
1213 };
1188 pexpect = super.buildPythonPackage {
1214 pexpect = super.buildPythonPackage {
1189 name = "pexpect-4.2.1";
1215 name = "pexpect-4.2.1";
1190 buildInputs = with self; [];
1216 buildInputs = with self; [];
1191 doCheck = false;
1217 doCheck = false;
1192 propagatedBuildInputs = with self; [ptyprocess];
1218 propagatedBuildInputs = with self; [ptyprocess];
1193 src = fetchurl {
1219 src = fetchurl {
1194 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
1220 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
1195 md5 = "3694410001a99dff83f0b500a1ca1c95";
1221 md5 = "3694410001a99dff83f0b500a1ca1c95";
1196 };
1222 };
1197 meta = {
1223 meta = {
1198 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1224 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1199 };
1225 };
1200 };
1226 };
1201 pickleshare = super.buildPythonPackage {
1227 pickleshare = super.buildPythonPackage {
1202 name = "pickleshare-0.7.4";
1228 name = "pickleshare-0.7.4";
1203 buildInputs = with self; [];
1229 buildInputs = with self; [];
1204 doCheck = false;
1230 doCheck = false;
1205 propagatedBuildInputs = with self; [pathlib2];
1231 propagatedBuildInputs = with self; [pathlib2];
1206 src = fetchurl {
1232 src = fetchurl {
1207 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
1233 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
1208 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
1234 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
1209 };
1235 };
1210 meta = {
1236 meta = {
1211 license = [ pkgs.lib.licenses.mit ];
1237 license = [ pkgs.lib.licenses.mit ];
1212 };
1238 };
1213 };
1239 };
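  # plaster and plaster-pastedeploy: the configuration-loading abstraction (and its
  # PasteDeploy binding) that pyramid 1.9 depends on; both are new propagated
  # dependencies of the pyramid entry further down.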
1240 plaster = super.buildPythonPackage {
1241 name = "plaster-0.5";
1242 buildInputs = with self; [];
1243 doCheck = false;
1244 propagatedBuildInputs = with self; [setuptools];
1245 src = fetchurl {
1246 url = "https://pypi.python.org/packages/99/b3/d7ca1fe31d2b56dba68a238721fda6820770f9c2a3de17a582d4b5b2edcc/plaster-0.5.tar.gz";
1247 md5 = "c59345a67a860cfcaa1bd6a81451399d";
1248 };
1249 meta = {
1250 license = [ pkgs.lib.licenses.mit ];
1251 };
1252 };
1253 plaster-pastedeploy = super.buildPythonPackage {
1254 name = "plaster-pastedeploy-0.4.1";
1255 buildInputs = with self; [];
1256 doCheck = false;
1257 propagatedBuildInputs = with self; [PasteDeploy plaster];
1258 src = fetchurl {
1259 url = "https://pypi.python.org/packages/9d/6e/f8be01ed41c94e6c54ac97cf2eb142a702aae0c8cce31c846f785e525b40/plaster_pastedeploy-0.4.1.tar.gz";
1260 md5 = "f48d5344b922e56c4978eebf1cd2e0d3";
1261 };
1262 meta = {
1263 license = [ pkgs.lib.licenses.mit ];
1264 };
1265 };
1214 prompt-toolkit = super.buildPythonPackage {
1266 prompt-toolkit = super.buildPythonPackage {
1215 name = "prompt-toolkit-1.0.14";
1267 name = "prompt-toolkit-1.0.14";
1216 buildInputs = with self; [];
1268 buildInputs = with self; [];
1217 doCheck = false;
1269 doCheck = false;
1218 propagatedBuildInputs = with self; [six wcwidth];
1270 propagatedBuildInputs = with self; [six wcwidth];
1219 src = fetchurl {
1271 src = fetchurl {
1220 url = "https://pypi.python.org/packages/55/56/8c39509b614bda53e638b7500f12577d663ac1b868aef53426fc6a26c3f5/prompt_toolkit-1.0.14.tar.gz";
1272 url = "https://pypi.python.org/packages/55/56/8c39509b614bda53e638b7500f12577d663ac1b868aef53426fc6a26c3f5/prompt_toolkit-1.0.14.tar.gz";
1221 md5 = "f24061ae133ed32c6b764e92bd48c496";
1273 md5 = "f24061ae133ed32c6b764e92bd48c496";
1222 };
1274 };
1223 meta = {
1275 meta = {
1224 license = [ pkgs.lib.licenses.bsdOriginal ];
1276 license = [ pkgs.lib.licenses.bsdOriginal ];
1225 };
1277 };
1226 };
1278 };
1227 psutil = super.buildPythonPackage {
1279 psutil = super.buildPythonPackage {
1228 name = "psutil-4.3.1";
1280 name = "psutil-4.3.1";
1229 buildInputs = with self; [];
1281 buildInputs = with self; [];
1230 doCheck = false;
1282 doCheck = false;
1231 propagatedBuildInputs = with self; [];
1283 propagatedBuildInputs = with self; [];
1232 src = fetchurl {
1284 src = fetchurl {
1233 url = "https://pypi.python.org/packages/78/cc/f267a1371f229bf16db6a4e604428c3b032b823b83155bd33cef45e49a53/psutil-4.3.1.tar.gz";
1285 url = "https://pypi.python.org/packages/78/cc/f267a1371f229bf16db6a4e604428c3b032b823b83155bd33cef45e49a53/psutil-4.3.1.tar.gz";
1234 md5 = "199a366dba829c88bddaf5b41d19ddc0";
1286 md5 = "199a366dba829c88bddaf5b41d19ddc0";
1235 };
1287 };
1236 meta = {
1288 meta = {
1237 license = [ pkgs.lib.licenses.bsdOriginal ];
1289 license = [ pkgs.lib.licenses.bsdOriginal ];
1238 };
1290 };
1239 };
1291 };
1240 psycopg2 = super.buildPythonPackage {
1292 psycopg2 = super.buildPythonPackage {
1241 name = "psycopg2-2.7.1";
1293 name = "psycopg2-2.7.1";
1242 buildInputs = with self; [];
1294 buildInputs = with self; [];
1243 doCheck = false;
1295 doCheck = false;
1244 propagatedBuildInputs = with self; [];
1296 propagatedBuildInputs = with self; [];
1245 src = fetchurl {
1297 src = fetchurl {
1246 url = "https://pypi.python.org/packages/f8/e9/5793369ce8a41bf5467623ded8d59a434dfef9c136351aca4e70c2657ba0/psycopg2-2.7.1.tar.gz";
1298 url = "https://pypi.python.org/packages/f8/e9/5793369ce8a41bf5467623ded8d59a434dfef9c136351aca4e70c2657ba0/psycopg2-2.7.1.tar.gz";
1247 md5 = "67848ac33af88336046802f6ef7081f3";
1299 md5 = "67848ac33af88336046802f6ef7081f3";
1248 };
1300 };
1249 meta = {
1301 meta = {
1250 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1302 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1251 };
1303 };
1252 };
1304 };
1253 ptyprocess = super.buildPythonPackage {
1305 ptyprocess = super.buildPythonPackage {
1254 name = "ptyprocess-0.5.2";
1306 name = "ptyprocess-0.5.2";
1255 buildInputs = with self; [];
1307 buildInputs = with self; [];
1256 doCheck = false;
1308 doCheck = false;
1257 propagatedBuildInputs = with self; [];
1309 propagatedBuildInputs = with self; [];
1258 src = fetchurl {
1310 src = fetchurl {
1259 url = "https://pypi.python.org/packages/51/83/5d07dc35534640b06f9d9f1a1d2bc2513fb9cc7595a1b0e28ae5477056ce/ptyprocess-0.5.2.tar.gz";
1311 url = "https://pypi.python.org/packages/51/83/5d07dc35534640b06f9d9f1a1d2bc2513fb9cc7595a1b0e28ae5477056ce/ptyprocess-0.5.2.tar.gz";
1260 md5 = "d3b8febae1b8c53b054bd818d0bb8665";
1312 md5 = "d3b8febae1b8c53b054bd818d0bb8665";
1261 };
1313 };
1262 meta = {
1314 meta = {
1263 license = [ ];
1315 license = [ ];
1264 };
1316 };
1265 };
1317 };
1266 py = super.buildPythonPackage {
1318 py = super.buildPythonPackage {
1267 name = "py-1.4.34";
1319 name = "py-1.4.34";
1268 buildInputs = with self; [];
1320 buildInputs = with self; [];
1269 doCheck = false;
1321 doCheck = false;
1270 propagatedBuildInputs = with self; [];
1322 propagatedBuildInputs = with self; [];
1271 src = fetchurl {
1323 src = fetchurl {
1272 url = "https://pypi.python.org/packages/68/35/58572278f1c097b403879c1e9369069633d1cbad5239b9057944bb764782/py-1.4.34.tar.gz";
1324 url = "https://pypi.python.org/packages/68/35/58572278f1c097b403879c1e9369069633d1cbad5239b9057944bb764782/py-1.4.34.tar.gz";
1273 md5 = "d9c3d8f734b0819ff48e355d77bf1730";
1325 md5 = "d9c3d8f734b0819ff48e355d77bf1730";
1274 };
1326 };
1275 meta = {
1327 meta = {
1276 license = [ pkgs.lib.licenses.mit ];
1328 license = [ pkgs.lib.licenses.mit ];
1277 };
1329 };
1278 };
1330 };
1279 py-bcrypt = super.buildPythonPackage {
1331 py-bcrypt = super.buildPythonPackage {
1280 name = "py-bcrypt-0.4";
1332 name = "py-bcrypt-0.4";
1281 buildInputs = with self; [];
1333 buildInputs = with self; [];
1282 doCheck = false;
1334 doCheck = false;
1283 propagatedBuildInputs = with self; [];
1335 propagatedBuildInputs = with self; [];
1284 src = fetchurl {
1336 src = fetchurl {
1285 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1337 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1286 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
1338 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
1287 };
1339 };
1288 meta = {
1340 meta = {
1289 license = [ pkgs.lib.licenses.bsdOriginal ];
1341 license = [ pkgs.lib.licenses.bsdOriginal ];
1290 };
1342 };
1291 };
1343 };
1292 py-gfm = super.buildPythonPackage {
1344 py-gfm = super.buildPythonPackage {
1293 name = "py-gfm-0.1.3";
1345 name = "py-gfm-0.1.3";
1294 buildInputs = with self; [];
1346 buildInputs = with self; [];
1295 doCheck = false;
1347 doCheck = false;
1296 propagatedBuildInputs = with self; [setuptools Markdown];
1348 propagatedBuildInputs = with self; [setuptools Markdown];
1297 src = fetchurl {
1349 src = fetchurl {
1298 url = "https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16";
1350 url = "https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16";
1299 md5 = "0d0d5385bfb629eea636a80b9c2bfd16";
1351 md5 = "0d0d5385bfb629eea636a80b9c2bfd16";
1300 };
1352 };
1301 meta = {
1353 meta = {
1302 license = [ pkgs.lib.licenses.bsdOriginal ];
1354 license = [ pkgs.lib.licenses.bsdOriginal ];
1303 };
1355 };
1304 };
1356 };
1305 pycrypto = super.buildPythonPackage {
1357 pycrypto = super.buildPythonPackage {
1306 name = "pycrypto-2.6.1";
1358 name = "pycrypto-2.6.1";
1307 buildInputs = with self; [];
1359 buildInputs = with self; [];
1308 doCheck = false;
1360 doCheck = false;
1309 propagatedBuildInputs = with self; [];
1361 propagatedBuildInputs = with self; [];
1310 src = fetchurl {
1362 src = fetchurl {
1311 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1363 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1312 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1364 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1313 };
1365 };
1314 meta = {
1366 meta = {
1315 license = [ pkgs.lib.licenses.publicDomain ];
1367 license = [ pkgs.lib.licenses.publicDomain ];
1316 };
1368 };
1317 };
1369 };
1318 pycurl = super.buildPythonPackage {
1370 pycurl = super.buildPythonPackage {
1319 name = "pycurl-7.19.5";
1371 name = "pycurl-7.19.5";
1320 buildInputs = with self; [];
1372 buildInputs = with self; [];
1321 doCheck = false;
1373 doCheck = false;
1322 propagatedBuildInputs = with self; [];
1374 propagatedBuildInputs = with self; [];
1323 src = fetchurl {
1375 src = fetchurl {
1324 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1376 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1325 md5 = "47b4eac84118e2606658122104e62072";
1377 md5 = "47b4eac84118e2606658122104e62072";
1326 };
1378 };
1327 meta = {
1379 meta = {
1328 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1380 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1329 };
1381 };
1330 };
1382 };
1331 pyflakes = super.buildPythonPackage {
1383 pyflakes = super.buildPythonPackage {
1332 name = "pyflakes-0.8.1";
1384 name = "pyflakes-0.8.1";
1333 buildInputs = with self; [];
1385 buildInputs = with self; [];
1334 doCheck = false;
1386 doCheck = false;
1335 propagatedBuildInputs = with self; [];
1387 propagatedBuildInputs = with self; [];
1336 src = fetchurl {
1388 src = fetchurl {
1337 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1389 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1338 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1390 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1339 };
1391 };
1340 meta = {
1392 meta = {
1341 license = [ pkgs.lib.licenses.mit ];
1393 license = [ pkgs.lib.licenses.mit ];
1342 };
1394 };
1343 };
1395 };
1344 pygments-markdown-lexer = super.buildPythonPackage {
1396 pygments-markdown-lexer = super.buildPythonPackage {
1345 name = "pygments-markdown-lexer-0.1.0.dev39";
1397 name = "pygments-markdown-lexer-0.1.0.dev39";
1346 buildInputs = with self; [];
1398 buildInputs = with self; [];
1347 doCheck = false;
1399 doCheck = false;
1348 propagatedBuildInputs = with self; [Pygments];
1400 propagatedBuildInputs = with self; [Pygments];
1349 src = fetchurl {
1401 src = fetchurl {
1350 url = "https://pypi.python.org/packages/c3/12/674cdee66635d638cedb2c5d9c85ce507b7b2f91bdba29e482f1b1160ff6/pygments-markdown-lexer-0.1.0.dev39.zip";
1402 url = "https://pypi.python.org/packages/c3/12/674cdee66635d638cedb2c5d9c85ce507b7b2f91bdba29e482f1b1160ff6/pygments-markdown-lexer-0.1.0.dev39.zip";
1351 md5 = "6360fe0f6d1f896e35b7a0142ce6459c";
1403 md5 = "6360fe0f6d1f896e35b7a0142ce6459c";
1352 };
1404 };
1353 meta = {
1405 meta = {
1354 license = [ pkgs.lib.licenses.asl20 ];
1406 license = [ pkgs.lib.licenses.asl20 ];
1355 };
1407 };
1356 };
1408 };
1357 pyparsing = super.buildPythonPackage {
1409 pyparsing = super.buildPythonPackage {
1358 name = "pyparsing-1.5.7";
1410 name = "pyparsing-1.5.7";
1359 buildInputs = with self; [];
1411 buildInputs = with self; [];
1360 doCheck = false;
1412 doCheck = false;
1361 propagatedBuildInputs = with self; [];
1413 propagatedBuildInputs = with self; [];
1362 src = fetchurl {
1414 src = fetchurl {
1363 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1415 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1364 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1416 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1365 };
1417 };
1366 meta = {
1418 meta = {
1367 license = [ pkgs.lib.licenses.mit ];
1419 license = [ pkgs.lib.licenses.mit ];
1368 };
1420 };
1369 };
1421 };
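  # pyramid 1.9 propagates plaster, plaster-pastedeploy and hupper in addition to the
  # packages required by 1.7.x; all three are defined earlier in this file.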
1370 pyramid = super.buildPythonPackage {
1422 pyramid = super.buildPythonPackage {
1371 name = "pyramid-1.7.4";
1423 name = "pyramid-1.9";
1372 buildInputs = with self; [];
1424 buildInputs = with self; [];
1373 doCheck = false;
1425 doCheck = false;
1374 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
1426 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy plaster plaster-pastedeploy hupper];
1375 src = fetchurl {
1427 src = fetchurl {
1376 url = "https://pypi.python.org/packages/33/91/55f5c661f8923902cd1f68d75f2b937c45e7682857356cf18f0be5493899/pyramid-1.7.4.tar.gz";
1428 url = "https://pypi.python.org/packages/b0/73/715321e129334f3e41430bede877620175a63ed075fd5d1fd2c25b7cb121/pyramid-1.9.tar.gz";
1377 md5 = "6ef1dfdcff9136d04490410757c4c446";
1429 md5 = "aa6c7c568f83151af51eb053ac633bc4";
1378 };
1430 };
1379 meta = {
1431 meta = {
1380 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1432 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1381 };
1433 };
1382 };
1434 };
1383 pyramid-beaker = super.buildPythonPackage {
1435 pyramid-beaker = super.buildPythonPackage {
1384 name = "pyramid-beaker-0.8";
1436 name = "pyramid-beaker-0.8";
1385 buildInputs = with self; [];
1437 buildInputs = with self; [];
1386 doCheck = false;
1438 doCheck = false;
1387 propagatedBuildInputs = with self; [pyramid Beaker];
1439 propagatedBuildInputs = with self; [pyramid Beaker];
1388 src = fetchurl {
1440 src = fetchurl {
1389 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1441 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1390 md5 = "22f14be31b06549f80890e2c63a93834";
1442 md5 = "22f14be31b06549f80890e2c63a93834";
1391 };
1443 };
1392 meta = {
1444 meta = {
1393 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1445 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1394 };
1446 };
1395 };
1447 };
1396 pyramid-debugtoolbar = super.buildPythonPackage {
1448 pyramid-debugtoolbar = super.buildPythonPackage {
1397 name = "pyramid-debugtoolbar-3.0.5";
1449 name = "pyramid-debugtoolbar-3.0.5";
1398 buildInputs = with self; [];
1450 buildInputs = with self; [];
1399 doCheck = false;
1451 doCheck = false;
1400 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1452 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1401 src = fetchurl {
1453 src = fetchurl {
1402 url = "https://pypi.python.org/packages/64/0e/df00bfb55605900e7a2f7e4a18dd83575a6651688e297d5a0aa4c208fd7d/pyramid_debugtoolbar-3.0.5.tar.gz";
1454 url = "https://pypi.python.org/packages/64/0e/df00bfb55605900e7a2f7e4a18dd83575a6651688e297d5a0aa4c208fd7d/pyramid_debugtoolbar-3.0.5.tar.gz";
1403 md5 = "aebab8c3bfdc6f89e4d3adc1d126538e";
1455 md5 = "aebab8c3bfdc6f89e4d3adc1d126538e";
1404 };
1456 };
1405 meta = {
1457 meta = {
1406 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1458 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1407 };
1459 };
1408 };
1460 };
1409 pyramid-jinja2 = super.buildPythonPackage {
1461 pyramid-jinja2 = super.buildPythonPackage {
1410 name = "pyramid-jinja2-2.5";
1462 name = "pyramid-jinja2-2.5";
1411 buildInputs = with self; [];
1463 buildInputs = with self; [];
1412 doCheck = false;
1464 doCheck = false;
1413 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1465 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1414 src = fetchurl {
1466 src = fetchurl {
1415 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1467 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1416 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1468 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1417 };
1469 };
1418 meta = {
1470 meta = {
1419 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1471 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1420 };
1472 };
1421 };
1473 };
1422 pyramid-mako = super.buildPythonPackage {
1474 pyramid-mako = super.buildPythonPackage {
1423 name = "pyramid-mako-1.0.2";
1475 name = "pyramid-mako-1.0.2";
1424 buildInputs = with self; [];
1476 buildInputs = with self; [];
1425 doCheck = false;
1477 doCheck = false;
1426 propagatedBuildInputs = with self; [pyramid Mako];
1478 propagatedBuildInputs = with self; [pyramid Mako];
1427 src = fetchurl {
1479 src = fetchurl {
1428 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1480 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1429 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1481 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1430 };
1482 };
1431 meta = {
1483 meta = {
1432 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1484 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1433 };
1485 };
1434 };
1486 };
1435 pysqlite = super.buildPythonPackage {
1487 pysqlite = super.buildPythonPackage {
1436 name = "pysqlite-2.8.3";
1488 name = "pysqlite-2.8.3";
1437 buildInputs = with self; [];
1489 buildInputs = with self; [];
1438 doCheck = false;
1490 doCheck = false;
1439 propagatedBuildInputs = with self; [];
1491 propagatedBuildInputs = with self; [];
1440 src = fetchurl {
1492 src = fetchurl {
1441 url = "https://pypi.python.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1493 url = "https://pypi.python.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1442 md5 = "033f17b8644577715aee55e8832ac9fc";
1494 md5 = "033f17b8644577715aee55e8832ac9fc";
1443 };
1495 };
1444 meta = {
1496 meta = {
1445 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1497 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1446 };
1498 };
1447 };
1499 };
1448 pytest = super.buildPythonPackage {
1500 pytest = super.buildPythonPackage {
1449 name = "pytest-3.1.2";
1501 name = "pytest-3.1.2";
1450 buildInputs = with self; [];
1502 buildInputs = with self; [];
1451 doCheck = false;
1503 doCheck = false;
1452 propagatedBuildInputs = with self; [py setuptools];
1504 propagatedBuildInputs = with self; [py setuptools];
1453 src = fetchurl {
1505 src = fetchurl {
1454 url = "https://pypi.python.org/packages/72/2b/2d3155e01f45a5a04427857352ee88220ee39550b2bc078f9db3190aea46/pytest-3.1.2.tar.gz";
1506 url = "https://pypi.python.org/packages/72/2b/2d3155e01f45a5a04427857352ee88220ee39550b2bc078f9db3190aea46/pytest-3.1.2.tar.gz";
1455 md5 = "c4d179f89043cc925e1c169d03128e02";
1507 md5 = "c4d179f89043cc925e1c169d03128e02";
1456 };
1508 };
1457 meta = {
1509 meta = {
1458 license = [ pkgs.lib.licenses.mit ];
1510 license = [ pkgs.lib.licenses.mit ];
1459 };
1511 };
1460 };
1512 };
1461 pytest-catchlog = super.buildPythonPackage {
1513 pytest-catchlog = super.buildPythonPackage {
1462 name = "pytest-catchlog-1.2.2";
1514 name = "pytest-catchlog-1.2.2";
1463 buildInputs = with self; [];
1515 buildInputs = with self; [];
1464 doCheck = false;
1516 doCheck = false;
1465 propagatedBuildInputs = with self; [py pytest];
1517 propagatedBuildInputs = with self; [py pytest];
1466 src = fetchurl {
1518 src = fetchurl {
1467 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1519 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1468 md5 = "09d890c54c7456c818102b7ff8c182c8";
1520 md5 = "09d890c54c7456c818102b7ff8c182c8";
1469 };
1521 };
1470 meta = {
1522 meta = {
1471 license = [ pkgs.lib.licenses.mit ];
1523 license = [ pkgs.lib.licenses.mit ];
1472 };
1524 };
1473 };
1525 };
1474 pytest-cov = super.buildPythonPackage {
1526 pytest-cov = super.buildPythonPackage {
1475 name = "pytest-cov-2.5.1";
1527 name = "pytest-cov-2.5.1";
1476 buildInputs = with self; [];
1528 buildInputs = with self; [];
1477 doCheck = false;
1529 doCheck = false;
1478 propagatedBuildInputs = with self; [pytest coverage];
1530 propagatedBuildInputs = with self; [pytest coverage];
1479 src = fetchurl {
1531 src = fetchurl {
1480 url = "https://pypi.python.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
1532 url = "https://pypi.python.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
1481 md5 = "5acf38d4909e19819eb5c1754fbfc0ac";
1533 md5 = "5acf38d4909e19819eb5c1754fbfc0ac";
1482 };
1534 };
1483 meta = {
1535 meta = {
1484 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1536 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1485 };
1537 };
1486 };
1538 };
1487 pytest-profiling = super.buildPythonPackage {
1539 pytest-profiling = super.buildPythonPackage {
1488 name = "pytest-profiling-1.2.6";
1540 name = "pytest-profiling-1.2.6";
1489 buildInputs = with self; [];
1541 buildInputs = with self; [];
1490 doCheck = false;
1542 doCheck = false;
1491 propagatedBuildInputs = with self; [six pytest gprof2dot];
1543 propagatedBuildInputs = with self; [six pytest gprof2dot];
1492 src = fetchurl {
1544 src = fetchurl {
1493 url = "https://pypi.python.org/packages/f9/0d/df67fb9ce16c2cef201693da956321b1bccfbf9a4ead39748b9f9d1d74cb/pytest-profiling-1.2.6.tar.gz";
1545 url = "https://pypi.python.org/packages/f9/0d/df67fb9ce16c2cef201693da956321b1bccfbf9a4ead39748b9f9d1d74cb/pytest-profiling-1.2.6.tar.gz";
1494 md5 = "50eb4c66c3762a2f1a49669bedc0b894";
1546 md5 = "50eb4c66c3762a2f1a49669bedc0b894";
1495 };
1547 };
1496 meta = {
1548 meta = {
1497 license = [ pkgs.lib.licenses.mit ];
1549 license = [ pkgs.lib.licenses.mit ];
1498 };
1550 };
1499 };
1551 };
1500 pytest-runner = super.buildPythonPackage {
1552 pytest-runner = super.buildPythonPackage {
1501 name = "pytest-runner-2.11.1";
1553 name = "pytest-runner-2.11.1";
1502 buildInputs = with self; [];
1554 buildInputs = with self; [];
1503 doCheck = false;
1555 doCheck = false;
1504 propagatedBuildInputs = with self; [];
1556 propagatedBuildInputs = with self; [];
1505 src = fetchurl {
1557 src = fetchurl {
1506 url = "https://pypi.python.org/packages/9e/4d/08889e5e27a9f5d6096b9ad257f4dea1faabb03c5ded8f665ead448f5d8a/pytest-runner-2.11.1.tar.gz";
1558 url = "https://pypi.python.org/packages/9e/4d/08889e5e27a9f5d6096b9ad257f4dea1faabb03c5ded8f665ead448f5d8a/pytest-runner-2.11.1.tar.gz";
1507 md5 = "bdb73eb18eca2727944a2dcf963c5a81";
1559 md5 = "bdb73eb18eca2727944a2dcf963c5a81";
1508 };
1560 };
1509 meta = {
1561 meta = {
1510 license = [ pkgs.lib.licenses.mit ];
1562 license = [ pkgs.lib.licenses.mit ];
1511 };
1563 };
1512 };
1564 };
1513 pytest-sugar = super.buildPythonPackage {
1565 pytest-sugar = super.buildPythonPackage {
1514 name = "pytest-sugar-0.8.0";
1566 name = "pytest-sugar-0.8.0";
1515 buildInputs = with self; [];
1567 buildInputs = with self; [];
1516 doCheck = false;
1568 doCheck = false;
1517 propagatedBuildInputs = with self; [pytest termcolor];
1569 propagatedBuildInputs = with self; [pytest termcolor];
1518 src = fetchurl {
1570 src = fetchurl {
1519 url = "https://pypi.python.org/packages/a5/b0/b2773dee078f17773a5bf2dfad49b0be57b6354bbd84bbefe4313e509d87/pytest-sugar-0.8.0.tar.gz";
1571 url = "https://pypi.python.org/packages/a5/b0/b2773dee078f17773a5bf2dfad49b0be57b6354bbd84bbefe4313e509d87/pytest-sugar-0.8.0.tar.gz";
1520 md5 = "8cafbdad648068e0e44b8fc5f9faae42";
1572 md5 = "8cafbdad648068e0e44b8fc5f9faae42";
1521 };
1573 };
1522 meta = {
1574 meta = {
1523 license = [ pkgs.lib.licenses.bsdOriginal ];
1575 license = [ pkgs.lib.licenses.bsdOriginal ];
1524 };
1576 };
1525 };
1577 };
1526 pytest-timeout = super.buildPythonPackage {
1578 pytest-timeout = super.buildPythonPackage {
1527 name = "pytest-timeout-1.2.0";
1579 name = "pytest-timeout-1.2.0";
1528 buildInputs = with self; [];
1580 buildInputs = with self; [];
1529 doCheck = false;
1581 doCheck = false;
1530 propagatedBuildInputs = with self; [pytest];
1582 propagatedBuildInputs = with self; [pytest];
1531 src = fetchurl {
1583 src = fetchurl {
1532 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
1584 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
1533 md5 = "83607d91aa163562c7ee835da57d061d";
1585 md5 = "83607d91aa163562c7ee835da57d061d";
1534 };
1586 };
1535 meta = {
1587 meta = {
1536 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1588 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1537 };
1589 };
1538 };
1590 };
1539 python-dateutil = super.buildPythonPackage {
1591 python-dateutil = super.buildPythonPackage {
1540 name = "python-dateutil-2.1";
1592 name = "python-dateutil-2.1";
1541 buildInputs = with self; [];
1593 buildInputs = with self; [];
1542 doCheck = false;
1594 doCheck = false;
1543 propagatedBuildInputs = with self; [six];
1595 propagatedBuildInputs = with self; [six];
1544 src = fetchurl {
1596 src = fetchurl {
1545 url = "https://pypi.python.org/packages/65/52/9c18dac21f174ad31b65e22d24297864a954e6fe65876eba3f5773d2da43/python-dateutil-2.1.tar.gz";
1597 url = "https://pypi.python.org/packages/65/52/9c18dac21f174ad31b65e22d24297864a954e6fe65876eba3f5773d2da43/python-dateutil-2.1.tar.gz";
1546 md5 = "1534bb15cf311f07afaa3aacba1c028b";
1598 md5 = "1534bb15cf311f07afaa3aacba1c028b";
1547 };
1599 };
1548 meta = {
1600 meta = {
1549 license = [ { fullName = "Simplified BSD"; } ];
1601 license = [ { fullName = "Simplified BSD"; } ];
1550 };
1602 };
1551 };
1603 };
1552 python-editor = super.buildPythonPackage {
1604 python-editor = super.buildPythonPackage {
1553 name = "python-editor-1.0.3";
1605 name = "python-editor-1.0.3";
1554 buildInputs = with self; [];
1606 buildInputs = with self; [];
1555 doCheck = false;
1607 doCheck = false;
1556 propagatedBuildInputs = with self; [];
1608 propagatedBuildInputs = with self; [];
1557 src = fetchurl {
1609 src = fetchurl {
1558 url = "https://pypi.python.org/packages/65/1e/adf6e000ea5dc909aa420352d6ba37f16434c8a3c2fa030445411a1ed545/python-editor-1.0.3.tar.gz";
1610 url = "https://pypi.python.org/packages/65/1e/adf6e000ea5dc909aa420352d6ba37f16434c8a3c2fa030445411a1ed545/python-editor-1.0.3.tar.gz";
1559 md5 = "0aca5f2ef176ce68e98a5b7e31372835";
1611 md5 = "0aca5f2ef176ce68e98a5b7e31372835";
1560 };
1612 };
1561 meta = {
1613 meta = {
1562 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1614 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1563 };
1615 };
1564 };
1616 };
1565 python-ldap = super.buildPythonPackage {
1617 python-ldap = super.buildPythonPackage {
1566 name = "python-ldap-2.4.40";
1618 name = "python-ldap-2.4.40";
1567 buildInputs = with self; [];
1619 buildInputs = with self; [];
1568 doCheck = false;
1620 doCheck = false;
1569 propagatedBuildInputs = with self; [setuptools];
1621 propagatedBuildInputs = with self; [setuptools];
1570 src = fetchurl {
1622 src = fetchurl {
1571 url = "https://pypi.python.org/packages/4a/d8/7d70a7469058a3987d224061a81d778951ac2b48220bdcc511e4b1b37176/python-ldap-2.4.40.tar.gz";
1623 url = "https://pypi.python.org/packages/4a/d8/7d70a7469058a3987d224061a81d778951ac2b48220bdcc511e4b1b37176/python-ldap-2.4.40.tar.gz";
1572 md5 = "aea0233f7d39b0c7549fcd310deeb0e5";
1624 md5 = "aea0233f7d39b0c7549fcd310deeb0e5";
1573 };
1625 };
1574 meta = {
1626 meta = {
1575 license = [ pkgs.lib.licenses.psfl ];
1627 license = [ pkgs.lib.licenses.psfl ];
1576 };
1628 };
1577 };
1629 };
1578 python-memcached = super.buildPythonPackage {
1630 python-memcached = super.buildPythonPackage {
1579 name = "python-memcached-1.58";
1631 name = "python-memcached-1.58";
1580 buildInputs = with self; [];
1632 buildInputs = with self; [];
1581 doCheck = false;
1633 doCheck = false;
1582 propagatedBuildInputs = with self; [six];
1634 propagatedBuildInputs = with self; [six];
1583 src = fetchurl {
1635 src = fetchurl {
1584 url = "https://pypi.python.org/packages/f7/62/14b2448cfb04427366f24104c9da97cf8ea380d7258a3233f066a951a8d8/python-memcached-1.58.tar.gz";
1636 url = "https://pypi.python.org/packages/f7/62/14b2448cfb04427366f24104c9da97cf8ea380d7258a3233f066a951a8d8/python-memcached-1.58.tar.gz";
1585 md5 = "23b258105013d14d899828d334e6b044";
1637 md5 = "23b258105013d14d899828d334e6b044";
1586 };
1638 };
1587 meta = {
1639 meta = {
1588 license = [ pkgs.lib.licenses.psfl ];
1640 license = [ pkgs.lib.licenses.psfl ];
1589 };
1641 };
1590 };
1642 };
1591 python-pam = super.buildPythonPackage {
1643 python-pam = super.buildPythonPackage {
1592 name = "python-pam-1.8.2";
1644 name = "python-pam-1.8.2";
1593 buildInputs = with self; [];
1645 buildInputs = with self; [];
1594 doCheck = false;
1646 doCheck = false;
1595 propagatedBuildInputs = with self; [];
1647 propagatedBuildInputs = with self; [];
1596 src = fetchurl {
1648 src = fetchurl {
1597 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1649 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1598 md5 = "db71b6b999246fb05d78ecfbe166629d";
1650 md5 = "db71b6b999246fb05d78ecfbe166629d";
1599 };
1651 };
1600 meta = {
1652 meta = {
1601 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1653 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1602 };
1654 };
1603 };
1655 };
1604 pytz = super.buildPythonPackage {
1656 pytz = super.buildPythonPackage {
1605 name = "pytz-2015.4";
1657 name = "pytz-2015.4";
1606 buildInputs = with self; [];
1658 buildInputs = with self; [];
1607 doCheck = false;
1659 doCheck = false;
1608 propagatedBuildInputs = with self; [];
1660 propagatedBuildInputs = with self; [];
1609 src = fetchurl {
1661 src = fetchurl {
1610 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1662 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1611 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1663 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1612 };
1664 };
1613 meta = {
1665 meta = {
1614 license = [ pkgs.lib.licenses.mit ];
1666 license = [ pkgs.lib.licenses.mit ];
1615 };
1667 };
1616 };
1668 };
1617 pyzmq = super.buildPythonPackage {
1669 pyzmq = super.buildPythonPackage {
1618 name = "pyzmq-14.6.0";
1670 name = "pyzmq-14.6.0";
1619 buildInputs = with self; [];
1671 buildInputs = with self; [];
1620 doCheck = false;
1672 doCheck = false;
1621 propagatedBuildInputs = with self; [];
1673 propagatedBuildInputs = with self; [];
1622 src = fetchurl {
1674 src = fetchurl {
1623 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1675 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1624 md5 = "395b5de95a931afa5b14c9349a5b8024";
1676 md5 = "395b5de95a931afa5b14c9349a5b8024";
1625 };
1677 };
1626 meta = {
1678 meta = {
1627 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1679 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1628 };
1680 };
1629 };
1681 };
1630 recaptcha-client = super.buildPythonPackage {
1682 recaptcha-client = super.buildPythonPackage {
1631 name = "recaptcha-client-1.0.6";
1683 name = "recaptcha-client-1.0.6";
1632 buildInputs = with self; [];
1684 buildInputs = with self; [];
1633 doCheck = false;
1685 doCheck = false;
1634 propagatedBuildInputs = with self; [];
1686 propagatedBuildInputs = with self; [];
1635 src = fetchurl {
1687 src = fetchurl {
1636 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1688 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1637 md5 = "74228180f7e1fb76c4d7089160b0d919";
1689 md5 = "74228180f7e1fb76c4d7089160b0d919";
1638 };
1690 };
1639 meta = {
1691 meta = {
1640 license = [ { fullName = "MIT/X11"; } ];
1692 license = [ { fullName = "MIT/X11"; } ];
1641 };
1693 };
1642 };
1694 };
1643 repoze.lru = super.buildPythonPackage {
1695 repoze.lru = super.buildPythonPackage {
1644 name = "repoze.lru-0.6";
1696 name = "repoze.lru-0.6";
1645 buildInputs = with self; [];
1697 buildInputs = with self; [];
1646 doCheck = false;
1698 doCheck = false;
1647 propagatedBuildInputs = with self; [];
1699 propagatedBuildInputs = with self; [];
1648 src = fetchurl {
1700 src = fetchurl {
1649 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1701 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1650 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1702 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1651 };
1703 };
1652 meta = {
1704 meta = {
1653 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1705 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1654 };
1706 };
1655 };
1707 };
1656 requests = super.buildPythonPackage {
1708 requests = super.buildPythonPackage {
1657 name = "requests-2.9.1";
1709 name = "requests-2.9.1";
1658 buildInputs = with self; [];
1710 buildInputs = with self; [];
1659 doCheck = false;
1711 doCheck = false;
1660 propagatedBuildInputs = with self; [];
1712 propagatedBuildInputs = with self; [];
1661 src = fetchurl {
1713 src = fetchurl {
1662 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1714 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1663 md5 = "0b7f480d19012ec52bab78292efd976d";
1715 md5 = "0b7f480d19012ec52bab78292efd976d";
1664 };
1716 };
1665 meta = {
1717 meta = {
1666 license = [ pkgs.lib.licenses.asl20 ];
1718 license = [ pkgs.lib.licenses.asl20 ];
1667 };
1719 };
1668 };
1720 };
1669 rhodecode-enterprise-ce = super.buildPythonPackage {
1721 rhodecode-enterprise-ce = super.buildPythonPackage {
1670 name = "rhodecode-enterprise-ce-4.9.0";
1722 name = "rhodecode-enterprise-ce-4.9.0";
1671 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
1723 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
1672 doCheck = true;
1724 doCheck = true;
1673 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments pygments-markdown-lexer Pylons Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress cssselect celery channelstream colander decorator deform docutils gevent gunicorn infrae.cache ipython iso8601 kombu lxml msgpack-python nbconvert packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson subprocess32 waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1725 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments pygments-markdown-lexer Pylons Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress cssselect celery channelstream colander decorator deform docutils gevent gunicorn infrae.cache ipython iso8601 kombu lxml msgpack-python nbconvert packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson subprocess32 waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1674 src = ./.;
1726 src = ./.;
1675 meta = {
1727 meta = {
1676 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
1728 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
1677 };
1729 };
1678 };
1730 };
1679 rhodecode-tools = super.buildPythonPackage {
1731 rhodecode-tools = super.buildPythonPackage {
1680 name = "rhodecode-tools-0.12.0";
1732 name = "rhodecode-tools-0.12.0";
1681 buildInputs = with self; [];
1733 buildInputs = with self; [];
1682 doCheck = false;
1734 doCheck = false;
1683 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests elasticsearch elasticsearch-dsl urllib3 Whoosh];
1735 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests elasticsearch elasticsearch-dsl urllib3 Whoosh];
1684 src = fetchurl {
1736 src = fetchurl {
1685 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.12.0.tar.gz?md5=9ca040356fa7e38d3f64529a4cffdca4";
1737 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.12.0.tar.gz?md5=9ca040356fa7e38d3f64529a4cffdca4";
1686 md5 = "9ca040356fa7e38d3f64529a4cffdca4";
1738 md5 = "9ca040356fa7e38d3f64529a4cffdca4";
1687 };
1739 };
1688 meta = {
1740 meta = {
1689 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1741 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1690 };
1742 };
1691 };
1743 };
1692 scandir = super.buildPythonPackage {
1744 scandir = super.buildPythonPackage {
1693 name = "scandir-1.5";
1745 name = "scandir-1.5";
1694 buildInputs = with self; [];
1746 buildInputs = with self; [];
1695 doCheck = false;
1747 doCheck = false;
1696 propagatedBuildInputs = with self; [];
1748 propagatedBuildInputs = with self; [];
1697 src = fetchurl {
1749 src = fetchurl {
1698 url = "https://pypi.python.org/packages/bd/f4/3143e0289faf0883228017dbc6387a66d0b468df646645e29e1eb89ea10e/scandir-1.5.tar.gz";
1750 url = "https://pypi.python.org/packages/bd/f4/3143e0289faf0883228017dbc6387a66d0b468df646645e29e1eb89ea10e/scandir-1.5.tar.gz";
1699 md5 = "a2713043de681bba6b084be42e7a8a44";
1751 md5 = "a2713043de681bba6b084be42e7a8a44";
1700 };
1752 };
1701 meta = {
1753 meta = {
1702 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
1754 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
1703 };
1755 };
1704 };
1756 };
1705 setproctitle = super.buildPythonPackage {
1757 setproctitle = super.buildPythonPackage {
1706 name = "setproctitle-1.1.8";
1758 name = "setproctitle-1.1.8";
1707 buildInputs = with self; [];
1759 buildInputs = with self; [];
1708 doCheck = false;
1760 doCheck = false;
1709 propagatedBuildInputs = with self; [];
1761 propagatedBuildInputs = with self; [];
1710 src = fetchurl {
1762 src = fetchurl {
1711 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1763 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1712 md5 = "728f4c8c6031bbe56083a48594027edd";
1764 md5 = "728f4c8c6031bbe56083a48594027edd";
1713 };
1765 };
1714 meta = {
1766 meta = {
1715 license = [ pkgs.lib.licenses.bsdOriginal ];
1767 license = [ pkgs.lib.licenses.bsdOriginal ];
1716 };
1768 };
1717 };
1769 };
1718 setuptools = super.buildPythonPackage {
1770 setuptools = super.buildPythonPackage {
1719 name = "setuptools-30.1.0";
1771 name = "setuptools-30.1.0";
1720 buildInputs = with self; [];
1772 buildInputs = with self; [];
1721 doCheck = false;
1773 doCheck = false;
1722 propagatedBuildInputs = with self; [];
1774 propagatedBuildInputs = with self; [];
1723 src = fetchurl {
1775 src = fetchurl {
1724 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
1776 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
1725 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
1777 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
1726 };
1778 };
1727 meta = {
1779 meta = {
1728 license = [ pkgs.lib.licenses.mit ];
1780 license = [ pkgs.lib.licenses.mit ];
1729 };
1781 };
1730 };
1782 };
1731 setuptools-scm = super.buildPythonPackage {
1783 setuptools-scm = super.buildPythonPackage {
1732 name = "setuptools-scm-1.15.0";
1784 name = "setuptools-scm-1.15.0";
1733 buildInputs = with self; [];
1785 buildInputs = with self; [];
1734 doCheck = false;
1786 doCheck = false;
1735 propagatedBuildInputs = with self; [];
1787 propagatedBuildInputs = with self; [];
1736 src = fetchurl {
1788 src = fetchurl {
1737 url = "https://pypi.python.org/packages/80/b7/31b6ae5fcb188e37f7e31abe75f9be90490a5456a72860fa6e643f8a3cbc/setuptools_scm-1.15.0.tar.gz";
1789 url = "https://pypi.python.org/packages/80/b7/31b6ae5fcb188e37f7e31abe75f9be90490a5456a72860fa6e643f8a3cbc/setuptools_scm-1.15.0.tar.gz";
1738 md5 = "b6916c78ed6253d6602444fad4279c5b";
1790 md5 = "b6916c78ed6253d6602444fad4279c5b";
1739 };
1791 };
1740 meta = {
1792 meta = {
1741 license = [ pkgs.lib.licenses.mit ];
1793 license = [ pkgs.lib.licenses.mit ];
1742 };
1794 };
1743 };
1795 };
1744 simplegeneric = super.buildPythonPackage {
1796 simplegeneric = super.buildPythonPackage {
1745 name = "simplegeneric-0.8.1";
1797 name = "simplegeneric-0.8.1";
1746 buildInputs = with self; [];
1798 buildInputs = with self; [];
1747 doCheck = false;
1799 doCheck = false;
1748 propagatedBuildInputs = with self; [];
1800 propagatedBuildInputs = with self; [];
1749 src = fetchurl {
1801 src = fetchurl {
1750 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
1802 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
1751 md5 = "f9c1fab00fd981be588fc32759f474e3";
1803 md5 = "f9c1fab00fd981be588fc32759f474e3";
1752 };
1804 };
1753 meta = {
1805 meta = {
1754 license = [ pkgs.lib.licenses.zpt21 ];
1806 license = [ pkgs.lib.licenses.zpt21 ];
1755 };
1807 };
1756 };
1808 };
1757 simplejson = super.buildPythonPackage {
1809 simplejson = super.buildPythonPackage {
1758 name = "simplejson-3.11.1";
1810 name = "simplejson-3.11.1";
1759 buildInputs = with self; [];
1811 buildInputs = with self; [];
1760 doCheck = false;
1812 doCheck = false;
1761 propagatedBuildInputs = with self; [];
1813 propagatedBuildInputs = with self; [];
1762 src = fetchurl {
1814 src = fetchurl {
1763 url = "https://pypi.python.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
1815 url = "https://pypi.python.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
1764 md5 = "6e2f1bd5fb0a926facf5d89d217a7183";
1816 md5 = "6e2f1bd5fb0a926facf5d89d217a7183";
1765 };
1817 };
1766 meta = {
1818 meta = {
1767 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
1819 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
1768 };
1820 };
1769 };
1821 };
1770 six = super.buildPythonPackage {
1822 six = super.buildPythonPackage {
1771 name = "six-1.9.0";
1823 name = "six-1.9.0";
1772 buildInputs = with self; [];
1824 buildInputs = with self; [];
1773 doCheck = false;
1825 doCheck = false;
1774 propagatedBuildInputs = with self; [];
1826 propagatedBuildInputs = with self; [];
1775 src = fetchurl {
1827 src = fetchurl {
1776 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1828 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1777 md5 = "476881ef4012262dfc8adc645ee786c4";
1829 md5 = "476881ef4012262dfc8adc645ee786c4";
1778 };
1830 };
1779 meta = {
1831 meta = {
1780 license = [ pkgs.lib.licenses.mit ];
1832 license = [ pkgs.lib.licenses.mit ];
1781 };
1833 };
1782 };
1834 };
1783 subprocess32 = super.buildPythonPackage {
1835 subprocess32 = super.buildPythonPackage {
1784 name = "subprocess32-3.2.7";
1836 name = "subprocess32-3.2.7";
1785 buildInputs = with self; [];
1837 buildInputs = with self; [];
1786 doCheck = false;
1838 doCheck = false;
1787 propagatedBuildInputs = with self; [];
1839 propagatedBuildInputs = with self; [];
1788 src = fetchurl {
1840 src = fetchurl {
1789 url = "https://pypi.python.org/packages/b8/2f/49e53b0d0e94611a2dc624a1ad24d41b6d94d0f1b0a078443407ea2214c2/subprocess32-3.2.7.tar.gz";
1841 url = "https://pypi.python.org/packages/b8/2f/49e53b0d0e94611a2dc624a1ad24d41b6d94d0f1b0a078443407ea2214c2/subprocess32-3.2.7.tar.gz";
1790 md5 = "824c801e479d3e916879aae3e9c15e16";
1842 md5 = "824c801e479d3e916879aae3e9c15e16";
1791 };
1843 };
1792 meta = {
1844 meta = {
1793 license = [ pkgs.lib.licenses.psfl ];
1845 license = [ pkgs.lib.licenses.psfl ];
1794 };
1846 };
1795 };
1847 };
1796 supervisor = super.buildPythonPackage {
1848 supervisor = super.buildPythonPackage {
1797 name = "supervisor-3.3.2";
1849 name = "supervisor-3.3.2";
1798 buildInputs = with self; [];
1850 buildInputs = with self; [];
1799 doCheck = false;
1851 doCheck = false;
1800 propagatedBuildInputs = with self; [meld3];
1852 propagatedBuildInputs = with self; [meld3];
1801 src = fetchurl {
1853 src = fetchurl {
1802 url = "https://pypi.python.org/packages/7b/17/88adf8cb25f80e2bc0d18e094fcd7ab300632ea00b601cbbbb84c2419eae/supervisor-3.3.2.tar.gz";
1854 url = "https://pypi.python.org/packages/7b/17/88adf8cb25f80e2bc0d18e094fcd7ab300632ea00b601cbbbb84c2419eae/supervisor-3.3.2.tar.gz";
1803 md5 = "04766d62864da13d6a12f7429e75314f";
1855 md5 = "04766d62864da13d6a12f7429e75314f";
1804 };
1856 };
1805 meta = {
1857 meta = {
1806 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1858 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1807 };
1859 };
1808 };
1860 };
1809 termcolor = super.buildPythonPackage {
1861 termcolor = super.buildPythonPackage {
1810 name = "termcolor-1.1.0";
1862 name = "termcolor-1.1.0";
1811 buildInputs = with self; [];
1863 buildInputs = with self; [];
1812 doCheck = false;
1864 doCheck = false;
1813 propagatedBuildInputs = with self; [];
1865 propagatedBuildInputs = with self; [];
1814 src = fetchurl {
1866 src = fetchurl {
1815 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
1867 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
1816 md5 = "043e89644f8909d462fbbfa511c768df";
1868 md5 = "043e89644f8909d462fbbfa511c768df";
1817 };
1869 };
1818 meta = {
1870 meta = {
1819 license = [ pkgs.lib.licenses.mit ];
1871 license = [ pkgs.lib.licenses.mit ];
1820 };
1872 };
1821 };
1873 };
1822 testpath = super.buildPythonPackage {
1874 testpath = super.buildPythonPackage {
1823 name = "testpath-0.3.1";
1875 name = "testpath-0.3.1";
1824 buildInputs = with self; [];
1876 buildInputs = with self; [];
1825 doCheck = false;
1877 doCheck = false;
1826 propagatedBuildInputs = with self; [];
1878 propagatedBuildInputs = with self; [];
1827 src = fetchurl {
1879 src = fetchurl {
1828 url = "https://pypi.python.org/packages/f4/8b/b71e9ee10e5f751e9d959bc750ab122ba04187f5aa52aabdc4e63b0e31a7/testpath-0.3.1.tar.gz";
1880 url = "https://pypi.python.org/packages/f4/8b/b71e9ee10e5f751e9d959bc750ab122ba04187f5aa52aabdc4e63b0e31a7/testpath-0.3.1.tar.gz";
1829 md5 = "2cd5ed5522fda781bb497c9d80ae2fc9";
1881 md5 = "2cd5ed5522fda781bb497c9d80ae2fc9";
1830 };
1882 };
1831 meta = {
1883 meta = {
1832 license = [ pkgs.lib.licenses.mit ];
1884 license = [ pkgs.lib.licenses.mit ];
1833 };
1885 };
1834 };
1886 };
1835 traitlets = super.buildPythonPackage {
1887 traitlets = super.buildPythonPackage {
1836 name = "traitlets-4.3.2";
1888 name = "traitlets-4.3.2";
1837 buildInputs = with self; [];
1889 buildInputs = with self; [];
1838 doCheck = false;
1890 doCheck = false;
1839 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
1891 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
1840 src = fetchurl {
1892 src = fetchurl {
1841 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
1893 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
1842 md5 = "3068663f2f38fd939a9eb3a500ccc154";
1894 md5 = "3068663f2f38fd939a9eb3a500ccc154";
1843 };
1895 };
1844 meta = {
1896 meta = {
1845 license = [ pkgs.lib.licenses.bsdOriginal ];
1897 license = [ pkgs.lib.licenses.bsdOriginal ];
1846 };
1898 };
1847 };
1899 };
1848 transifex-client = super.buildPythonPackage {
1900 transifex-client = super.buildPythonPackage {
1849 name = "transifex-client-0.10";
1901 name = "transifex-client-0.10";
1850 buildInputs = with self; [];
1902 buildInputs = with self; [];
1851 doCheck = false;
1903 doCheck = false;
1852 propagatedBuildInputs = with self; [];
1904 propagatedBuildInputs = with self; [];
1853 src = fetchurl {
1905 src = fetchurl {
1854 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1906 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1855 md5 = "5549538d84b8eede6b254cd81ae024fa";
1907 md5 = "5549538d84b8eede6b254cd81ae024fa";
1856 };
1908 };
1857 meta = {
1909 meta = {
1858 license = [ pkgs.lib.licenses.gpl2 ];
1910 license = [ pkgs.lib.licenses.gpl2 ];
1859 };
1911 };
1860 };
1912 };
1861 translationstring = super.buildPythonPackage {
1913 translationstring = super.buildPythonPackage {
1862 name = "translationstring-1.3";
1914 name = "translationstring-1.3";
1863 buildInputs = with self; [];
1915 buildInputs = with self; [];
1864 doCheck = false;
1916 doCheck = false;
1865 propagatedBuildInputs = with self; [];
1917 propagatedBuildInputs = with self; [];
1866 src = fetchurl {
1918 src = fetchurl {
1867 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1919 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1868 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1920 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1869 };
1921 };
1870 meta = {
1922 meta = {
1871 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1923 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1872 };
1924 };
1873 };
1925 };
1874 trollius = super.buildPythonPackage {
1926 trollius = super.buildPythonPackage {
1875 name = "trollius-1.0.4";
1927 name = "trollius-1.0.4";
1876 buildInputs = with self; [];
1928 buildInputs = with self; [];
1877 doCheck = false;
1929 doCheck = false;
1878 propagatedBuildInputs = with self; [futures];
1930 propagatedBuildInputs = with self; [futures];
1879 src = fetchurl {
1931 src = fetchurl {
1880 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1932 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1881 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1933 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1882 };
1934 };
1883 meta = {
1935 meta = {
1884 license = [ pkgs.lib.licenses.asl20 ];
1936 license = [ pkgs.lib.licenses.asl20 ];
1885 };
1937 };
1886 };
1938 };
1887 uWSGI = super.buildPythonPackage {
1939 uWSGI = super.buildPythonPackage {
1888 name = "uWSGI-2.0.15";
1940 name = "uWSGI-2.0.15";
1889 buildInputs = with self; [];
1941 buildInputs = with self; [];
1890 doCheck = false;
1942 doCheck = false;
1891 propagatedBuildInputs = with self; [];
1943 propagatedBuildInputs = with self; [];
1892 src = fetchurl {
1944 src = fetchurl {
1893 url = "https://pypi.python.org/packages/bb/0a/45e5aa80dc135889594bb371c082d20fb7ee7303b174874c996888cc8511/uwsgi-2.0.15.tar.gz";
1945 url = "https://pypi.python.org/packages/bb/0a/45e5aa80dc135889594bb371c082d20fb7ee7303b174874c996888cc8511/uwsgi-2.0.15.tar.gz";
1894 md5 = "fc50bd9e83b7602fa474b032167010a7";
1946 md5 = "fc50bd9e83b7602fa474b032167010a7";
1895 };
1947 };
1896 meta = {
1948 meta = {
1897 license = [ pkgs.lib.licenses.gpl2 ];
1949 license = [ pkgs.lib.licenses.gpl2 ];
1898 };
1950 };
1899 };
1951 };
1900 urllib3 = super.buildPythonPackage {
1952 urllib3 = super.buildPythonPackage {
1901 name = "urllib3-1.16";
1953 name = "urllib3-1.16";
1902 buildInputs = with self; [];
1954 buildInputs = with self; [];
1903 doCheck = false;
1955 doCheck = false;
1904 propagatedBuildInputs = with self; [];
1956 propagatedBuildInputs = with self; [];
1905 src = fetchurl {
1957 src = fetchurl {
1906 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1958 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1907 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1959 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1908 };
1960 };
1909 meta = {
1961 meta = {
1910 license = [ pkgs.lib.licenses.mit ];
1962 license = [ pkgs.lib.licenses.mit ];
1911 };
1963 };
1912 };
1964 };
1913 venusian = super.buildPythonPackage {
1965 venusian = super.buildPythonPackage {
1914 name = "venusian-1.1.0";
1966 name = "venusian-1.1.0";
1915 buildInputs = with self; [];
1967 buildInputs = with self; [];
1916 doCheck = false;
1968 doCheck = false;
1917 propagatedBuildInputs = with self; [];
1969 propagatedBuildInputs = with self; [];
1918 src = fetchurl {
1970 src = fetchurl {
1919 url = "https://pypi.python.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
1971 url = "https://pypi.python.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
1920 md5 = "56bc5e6756e4bda37bcdb94f74a72b8f";
1972 md5 = "56bc5e6756e4bda37bcdb94f74a72b8f";
1921 };
1973 };
1922 meta = {
1974 meta = {
1923 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1975 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1924 };
1976 };
1925 };
1977 };
1926 waitress = super.buildPythonPackage {
1978 waitress = super.buildPythonPackage {
1927 name = "waitress-1.0.2";
1979 name = "waitress-1.0.2";
1928 buildInputs = with self; [];
1980 buildInputs = with self; [];
1929 doCheck = false;
1981 doCheck = false;
1930 propagatedBuildInputs = with self; [];
1982 propagatedBuildInputs = with self; [];
1931 src = fetchurl {
1983 src = fetchurl {
1932 url = "https://pypi.python.org/packages/cd/f4/400d00863afa1e03618e31fd7e2092479a71b8c9718b00eb1eeb603746c6/waitress-1.0.2.tar.gz";
1984 url = "https://pypi.python.org/packages/cd/f4/400d00863afa1e03618e31fd7e2092479a71b8c9718b00eb1eeb603746c6/waitress-1.0.2.tar.gz";
1933 md5 = "b968f39e95d609f6194c6e50425d4bb7";
1985 md5 = "b968f39e95d609f6194c6e50425d4bb7";
1934 };
1986 };
1935 meta = {
1987 meta = {
1936 license = [ pkgs.lib.licenses.zpt21 ];
1988 license = [ pkgs.lib.licenses.zpt21 ];
1937 };
1989 };
1938 };
1990 };
1939 wcwidth = super.buildPythonPackage {
1991 wcwidth = super.buildPythonPackage {
1940 name = "wcwidth-0.1.7";
1992 name = "wcwidth-0.1.7";
1941 buildInputs = with self; [];
1993 buildInputs = with self; [];
1942 doCheck = false;
1994 doCheck = false;
1943 propagatedBuildInputs = with self; [];
1995 propagatedBuildInputs = with self; [];
1944 src = fetchurl {
1996 src = fetchurl {
1945 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
1997 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
1946 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
1998 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
1947 };
1999 };
1948 meta = {
2000 meta = {
1949 license = [ pkgs.lib.licenses.mit ];
2001 license = [ pkgs.lib.licenses.mit ];
1950 };
2002 };
1951 };
2003 };
1952 ws4py = super.buildPythonPackage {
2004 ws4py = super.buildPythonPackage {
1953 name = "ws4py-0.3.5";
2005 name = "ws4py-0.3.5";
1954 buildInputs = with self; [];
2006 buildInputs = with self; [];
1955 doCheck = false;
2007 doCheck = false;
1956 propagatedBuildInputs = with self; [];
2008 propagatedBuildInputs = with self; [];
1957 src = fetchurl {
2009 src = fetchurl {
1958 url = "https://pypi.python.org/packages/b6/4f/34af703be86939629479e74d6e650e39f3bd73b3b09212c34e5125764cbc/ws4py-0.3.5.zip";
2010 url = "https://pypi.python.org/packages/b6/4f/34af703be86939629479e74d6e650e39f3bd73b3b09212c34e5125764cbc/ws4py-0.3.5.zip";
1959 md5 = "a261b75c20b980e55ce7451a3576a867";
2011 md5 = "a261b75c20b980e55ce7451a3576a867";
1960 };
2012 };
1961 meta = {
2013 meta = {
1962 license = [ pkgs.lib.licenses.bsdOriginal ];
2014 license = [ pkgs.lib.licenses.bsdOriginal ];
1963 };
2015 };
1964 };
2016 };
1965 wsgiref = super.buildPythonPackage {
2017 wsgiref = super.buildPythonPackage {
1966 name = "wsgiref-0.1.2";
2018 name = "wsgiref-0.1.2";
1967 buildInputs = with self; [];
2019 buildInputs = with self; [];
1968 doCheck = false;
2020 doCheck = false;
1969 propagatedBuildInputs = with self; [];
2021 propagatedBuildInputs = with self; [];
1970 src = fetchurl {
2022 src = fetchurl {
1971 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2023 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1972 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
2024 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1973 };
2025 };
1974 meta = {
2026 meta = {
1975 license = [ { fullName = "PSF or ZPL"; } ];
2027 license = [ { fullName = "PSF or ZPL"; } ];
1976 };
2028 };
1977 };
2029 };
1978 zope.cachedescriptors = super.buildPythonPackage {
2030 zope.cachedescriptors = super.buildPythonPackage {
1979 name = "zope.cachedescriptors-4.0.0";
2031 name = "zope.cachedescriptors-4.0.0";
1980 buildInputs = with self; [];
2032 buildInputs = with self; [];
1981 doCheck = false;
2033 doCheck = false;
1982 propagatedBuildInputs = with self; [setuptools];
2034 propagatedBuildInputs = with self; [setuptools];
1983 src = fetchurl {
2035 src = fetchurl {
1984 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
2036 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1985 md5 = "8d308de8c936792c8e758058fcb7d0f0";
2037 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1986 };
2038 };
1987 meta = {
2039 meta = {
1988 license = [ pkgs.lib.licenses.zpt21 ];
2040 license = [ pkgs.lib.licenses.zpt21 ];
1989 };
2041 };
1990 };
2042 };
1991 zope.deprecation = super.buildPythonPackage {
2043 zope.deprecation = super.buildPythonPackage {
1992 name = "zope.deprecation-4.1.2";
2044 name = "zope.deprecation-4.1.2";
1993 buildInputs = with self; [];
2045 buildInputs = with self; [];
1994 doCheck = false;
2046 doCheck = false;
1995 propagatedBuildInputs = with self; [setuptools];
2047 propagatedBuildInputs = with self; [setuptools];
1996 src = fetchurl {
2048 src = fetchurl {
1997 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
2049 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1998 md5 = "e9a663ded58f4f9f7881beb56cae2782";
2050 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1999 };
2051 };
2000 meta = {
2052 meta = {
2001 license = [ pkgs.lib.licenses.zpt21 ];
2053 license = [ pkgs.lib.licenses.zpt21 ];
2002 };
2054 };
2003 };
2055 };
2004 zope.event = super.buildPythonPackage {
2056 zope.event = super.buildPythonPackage {
2005 name = "zope.event-4.0.3";
2057 name = "zope.event-4.0.3";
2006 buildInputs = with self; [];
2058 buildInputs = with self; [];
2007 doCheck = false;
2059 doCheck = false;
2008 propagatedBuildInputs = with self; [setuptools];
2060 propagatedBuildInputs = with self; [setuptools];
2009 src = fetchurl {
2061 src = fetchurl {
2010 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
2062 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
2011 md5 = "9a3780916332b18b8b85f522bcc3e249";
2063 md5 = "9a3780916332b18b8b85f522bcc3e249";
2012 };
2064 };
2013 meta = {
2065 meta = {
2014 license = [ pkgs.lib.licenses.zpt21 ];
2066 license = [ pkgs.lib.licenses.zpt21 ];
2015 };
2067 };
2016 };
2068 };
2017 zope.interface = super.buildPythonPackage {
2069 zope.interface = super.buildPythonPackage {
2018 name = "zope.interface-4.1.3";
2070 name = "zope.interface-4.1.3";
2019 buildInputs = with self; [];
2071 buildInputs = with self; [];
2020 doCheck = false;
2072 doCheck = false;
2021 propagatedBuildInputs = with self; [setuptools];
2073 propagatedBuildInputs = with self; [setuptools];
2022 src = fetchurl {
2074 src = fetchurl {
2023 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
2075 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
2024 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
2076 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
2025 };
2077 };
2026 meta = {
2078 meta = {
2027 license = [ pkgs.lib.licenses.zpt21 ];
2079 license = [ pkgs.lib.licenses.zpt21 ];
2028 };
2080 };
2029 };
2081 };
2030
2082
2031 ### Test requirements
2083 ### Test requirements
2032
2084
2033
2085
2034 }
2086 }
@@ -1,136 +1,136 b''
1 ## core
1 ## core
2 setuptools==30.1.0
2 setuptools==30.1.0
3 setuptools-scm==1.15.0
3 setuptools-scm==1.15.0
4
4
5 amqplib==1.0.2
5 amqplib==1.0.2
6 anyjson==0.3.3
6 anyjson==0.3.3
7 authomatic==0.1.0.post1
7 authomatic==0.1.0.post1
8 Babel==1.3
8 Babel==1.3
9 backport-ipaddress==0.1
9 backport-ipaddress==0.1
10 Beaker==1.9.0
10 Beaker==1.9.0
11 celery==2.2.10
11 celery==2.2.10
12 Chameleon==2.24
12 Chameleon==2.24
13 channelstream==0.5.2
13 channelstream==0.5.2
14 click==5.1
14 click==5.1
15 colander==1.3.3
15 colander==1.3.3
16 configobj==5.0.6
16 configobj==5.0.6
17 cssselect==1.0.1
17 cssselect==1.0.1
18 decorator==4.0.11
18 decorator==4.0.11
19 deform==2.0.4
19 deform==2.0.4
20 docutils==0.13.1
20 docutils==0.13.1
21 dogpile.cache==0.6.4
21 dogpile.cache==0.6.4
22 dogpile.core==0.4.1
22 dogpile.core==0.4.1
23 ecdsa==0.11
23 ecdsa==0.11
24 FormEncode==1.2.4
24 FormEncode==1.2.4
25 future==0.14.3
25 future==0.14.3
26 futures==3.0.2
26 futures==3.0.2
27 gnureadline==6.3.3
27 gnureadline==6.3.3
28 infrae.cache==1.0.1
28 infrae.cache==1.0.1
29 iso8601==0.1.11
29 iso8601==0.1.11
30 itsdangerous==0.24
30 itsdangerous==0.24
31 Jinja2==2.7.3
31 Jinja2==2.7.3
32 kombu==1.5.1
32 kombu==1.5.1
33 lxml==3.7.3
33 lxml==3.7.3
34 Mako==1.0.6
34 Mako==1.0.6
35 Markdown==2.6.8
35 Markdown==2.6.8
36 MarkupSafe==0.23
36 MarkupSafe==0.23
37 meld3==1.0.2
37 meld3==1.0.2
38 msgpack-python==0.4.8
38 msgpack-python==0.4.8
39 MySQL-python==1.2.5
39 MySQL-python==1.2.5
40 nose==1.3.6
40 nose==1.3.6
41 objgraph==3.1.0
41 objgraph==3.1.0
42 packaging==15.2
42 packaging==15.2
43 paramiko==1.15.1
43 paramiko==1.15.1
44 Paste==2.0.3
44 Paste==2.0.3
45 PasteDeploy==1.5.2
45 PasteDeploy==1.5.2
46 PasteScript==1.7.5
46 PasteScript==1.7.5
47 pathlib2==2.3.0
47 pathlib2==2.3.0
48 psutil==4.3.1
48 psutil==4.3.1
49 psycopg2==2.7.1
49 psycopg2==2.7.1
50 py-bcrypt==0.4
50 py-bcrypt==0.4
51 pycrypto==2.6.1
51 pycrypto==2.6.1
52 pycurl==7.19.5
52 pycurl==7.19.5
53 pyflakes==0.8.1
53 pyflakes==0.8.1
54 pygments-markdown-lexer==0.1.0.dev39
54 pygments-markdown-lexer==0.1.0.dev39
55 Pygments==2.2.0
55 Pygments==2.2.0
56 pyparsing==1.5.7
56 pyparsing==1.5.7
57 pyramid-beaker==0.8
57 pyramid-beaker==0.8
58 pyramid-debugtoolbar==3.0.5
58 pyramid-debugtoolbar==3.0.5
59 pyramid-jinja2==2.5
59 pyramid-jinja2==2.5
60 pyramid-mako==1.0.2
60 pyramid-mako==1.0.2
61 pyramid==1.7.4
61 pyramid==1.9.0
62 pysqlite==2.8.3
62 pysqlite==2.8.3
63 python-dateutil==2.1
63 python-dateutil==2.1
64 python-ldap==2.4.40
64 python-ldap==2.4.40
65 python-memcached==1.58
65 python-memcached==1.58
66 python-pam==1.8.2
66 python-pam==1.8.2
67 pytz==2015.4
67 pytz==2015.4
68 pyzmq==14.6.0
68 pyzmq==14.6.0
69 recaptcha-client==1.0.6
69 recaptcha-client==1.0.6
70 repoze.lru==0.6
70 repoze.lru==0.6
71 requests==2.9.1
71 requests==2.9.1
72 Routes==1.13
72 Routes==1.13
73 setproctitle==1.1.8
73 setproctitle==1.1.8
74 simplejson==3.11.1
74 simplejson==3.11.1
75 six==1.9.0
75 six==1.9.0
76 Sphinx==1.2.2
76 Sphinx==1.2.2
77 SQLAlchemy==0.9.9
77 SQLAlchemy==0.9.9
78 subprocess32==3.2.7
78 subprocess32==3.2.7
79 supervisor==3.3.2
79 supervisor==3.3.2
80 Tempita==0.5.2
80 Tempita==0.5.2
81 translationstring==1.3
81 translationstring==1.3
82 trollius==1.0.4
82 trollius==1.0.4
83 urllib3==1.16
83 urllib3==1.16
84 URLObject==2.4.0
84 URLObject==2.4.0
85 venusian==1.1.0
85 venusian==1.1.0
86 WebError==0.10.3
86 WebError==0.10.3
87 WebHelpers2==2.0
87 WebHelpers2==2.0
88 WebHelpers==1.3
88 WebHelpers==1.3
89 WebOb==1.3.1
89 WebOb==1.7.3
90 Whoosh==2.7.4
90 Whoosh==2.7.4
91 wsgiref==0.1.2
91 wsgiref==0.1.2
92 zope.cachedescriptors==4.0.0
92 zope.cachedescriptors==4.0.0
93 zope.deprecation==4.1.2
93 zope.deprecation==4.1.2
94 zope.event==4.0.3
94 zope.event==4.0.3
95 zope.interface==4.1.3
95 zope.interface==4.1.3
96
96
97 ## customized/patched libs
97 ## customized/patched libs
98 # our patched version of Pylons==1.0.2
98 # our patched version of Pylons==1.0.2
99 https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f#egg=Pylons==1.0.2.rhodecode-patch-1
99 https://code.rhodecode.com/upstream/pylons/archive/707354ee4261b9c10450404fc9852ccea4fd667d.tar.gz?md5=f26633726fa2cd3a340316ee6a5d218f#egg=Pylons==1.0.2.rhodecode-patch-1
100 # not released py-gfm==0.1.3
100 # not released py-gfm==0.1.3
101 https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16#egg=py-gfm==0.1.3.rhodecode-upstream1
101 https://code.rhodecode.com/upstream/py-gfm/archive/0d66a19bc16e3d49de273c0f797d4e4781e8c0f2.tar.gz?md5=0d0d5385bfb629eea636a80b9c2bfd16#egg=py-gfm==0.1.3.rhodecode-upstream1
102
102
103 # IPYTHON RENDERING
103 # IPYTHON RENDERING
104 # entrypoints backport, pypi version doesn't support egg installs
104 # entrypoints backport, pypi version doesn't support egg installs
105 https://code.rhodecode.com/upstream/entrypoints/archive/96e6d645684e1af3d7df5b5272f3fe85a546b233.tar.gz?md5=7db37771aea9ac9fefe093e5d6987313#egg=entrypoints==0.2.2.rhodecode-upstream1
105 https://code.rhodecode.com/upstream/entrypoints/archive/96e6d645684e1af3d7df5b5272f3fe85a546b233.tar.gz?md5=7db37771aea9ac9fefe093e5d6987313#egg=entrypoints==0.2.2.rhodecode-upstream1
106 nbconvert==5.1.1
106 nbconvert==5.1.1
107 nbformat==4.3.0
107 nbformat==4.3.0
108 jupyter_client==5.0.0
108 jupyter_client==5.0.0
109
109
110 ## cli tools
110 ## cli tools
111 alembic==0.9.2
111 alembic==0.9.2
112 invoke==0.13.0
112 invoke==0.13.0
113 bumpversion==0.5.3
113 bumpversion==0.5.3
114 transifex-client==0.10
114 transifex-client==0.10
115
115
116 ## http servers
116 ## http servers
117 gevent==1.2.2
117 gevent==1.2.2
118 greenlet==0.4.12
118 greenlet==0.4.12
119 gunicorn==19.7.1
119 gunicorn==19.7.1
120 waitress==1.0.2
120 waitress==1.0.2
121 uWSGI==2.0.15
121 uWSGI==2.0.15
122
122
123 ## debug
123 ## debug
124 ipdb==0.10.3
124 ipdb==0.10.3
125 ipython==5.1.0
125 ipython==5.1.0
126 CProfileV==1.0.7
126 CProfileV==1.0.7
127 bottle==0.12.8
127 bottle==0.12.8
128
128
129 ## rhodecode-tools, special case
129 ## rhodecode-tools, special case
130 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.12.0.tar.gz?md5=9ca040356fa7e38d3f64529a4cffdca4#egg=rhodecode-tools==0.12.0
130 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.12.0.tar.gz?md5=9ca040356fa7e38d3f64529a4cffdca4#egg=rhodecode-tools==0.12.0
131
131
132 ## appenlight
132 ## appenlight
133 appenlight-client==0.6.21
133 appenlight-client==0.6.21
134
134
135 ## test related requirements
135 ## test related requirements
136 -r requirements_test.txt
136 -r requirements_test.txt
@@ -1,15 +1,15 b''
1 # test related requirements
1 # test related requirements
2 pytest==3.1.2
2 pytest==3.1.2
3 py==1.4.34
3 py==1.4.34
4 pytest-cov==2.5.1
4 pytest-cov==2.5.1
5 pytest-sugar==0.8.0
5 pytest-sugar==0.8.0
6 pytest-runner==2.11.1
6 pytest-runner==2.11.1
7 pytest-catchlog==1.2.2
7 pytest-catchlog==1.2.2
8 pytest-profiling==1.2.6
8 pytest-profiling==1.2.6
9 gprof2dot==2016.10.13
9 gprof2dot==2016.10.13
10 pytest-timeout==1.2.0
10 pytest-timeout==1.2.0
11
11
12 mock==1.0.1
12 mock==1.0.1
13 WebTest==1.4.3
13 WebTest==2.0.27
14 cov-core==1.15.0
14 cov-core==1.15.0
15 coverage==3.7.1
15 coverage==3.7.1
@@ -1,533 +1,525 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Pylons middleware initialization
22 Pylons middleware initialization
23 """
23 """
24 import logging
24 import logging
25 from collections import OrderedDict
25 from collections import OrderedDict
26
26
27 from paste.registry import RegistryManager
27 from paste.registry import RegistryManager
28 from paste.gzipper import make_gzip_middleware
28 from paste.gzipper import make_gzip_middleware
29 from pylons.wsgiapp import PylonsApp
29 from pylons.wsgiapp import PylonsApp
30 from pyramid.authorization import ACLAuthorizationPolicy
30 from pyramid.authorization import ACLAuthorizationPolicy
31 from pyramid.config import Configurator
31 from pyramid.config import Configurator
32 from pyramid.settings import asbool, aslist
32 from pyramid.settings import asbool, aslist
33 from pyramid.wsgi import wsgiapp
33 from pyramid.wsgi import wsgiapp
34 from pyramid.httpexceptions import (
34 from pyramid.httpexceptions import (
35 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound)
35 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound)
36 from pyramid.events import ApplicationCreated
36 from pyramid.events import ApplicationCreated
37 from pyramid.renderers import render_to_response
37 from pyramid.renderers import render_to_response
38 from routes.middleware import RoutesMiddleware
38 from routes.middleware import RoutesMiddleware
39 import routes.util
39 import routes.util
40
40
41 import rhodecode
41 import rhodecode
42
42
43 from rhodecode.model import meta
43 from rhodecode.model import meta
44 from rhodecode.config import patches
44 from rhodecode.config import patches
45 from rhodecode.config.routing import STATIC_FILE_PREFIX
45 from rhodecode.config.routing import STATIC_FILE_PREFIX
46 from rhodecode.config.environment import (
46 from rhodecode.config.environment import (
47 load_environment, load_pyramid_environment)
47 load_environment, load_pyramid_environment)
48
48
49 from rhodecode.lib.vcs import VCSCommunicationError
49 from rhodecode.lib.vcs import VCSCommunicationError
50 from rhodecode.lib.exceptions import VCSServerUnavailable
50 from rhodecode.lib.exceptions import VCSServerUnavailable
51 from rhodecode.lib.middleware import csrf
51 from rhodecode.lib.middleware import csrf
52 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
52 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
53 from rhodecode.lib.middleware.error_handling import (
53 from rhodecode.lib.middleware.error_handling import (
54 PylonsErrorHandlingMiddleware)
54 PylonsErrorHandlingMiddleware)
55 from rhodecode.lib.middleware.https_fixup import HttpsFixup
55 from rhodecode.lib.middleware.https_fixup import HttpsFixup
56 from rhodecode.lib.middleware.vcs import VCSMiddleware
56 from rhodecode.lib.middleware.vcs import VCSMiddleware
57 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
57 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
58 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
58 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
59 from rhodecode.subscribers import (
59 from rhodecode.subscribers import (
60 scan_repositories_if_enabled, write_js_routes_if_enabled,
60 scan_repositories_if_enabled, write_js_routes_if_enabled,
61 write_metadata_if_needed)
61 write_metadata_if_needed)
62
62
63
63
64 log = logging.getLogger(__name__)
64 log = logging.getLogger(__name__)
65
65
66
66
67 # this is used to avoid the route lookup overhead in RoutesMiddleware
67 # this is used to avoid the route lookup overhead in RoutesMiddleware
68 # for certain routes which won't go to pylons - e.g. static files, debugger
68 # for certain routes which won't go to pylons - e.g. static files, debugger
69 # it is only needed for the pylons migration and can be removed once complete
69 # it is only needed for the pylons migration and can be removed once complete
70 class SkippableRoutesMiddleware(RoutesMiddleware):
70 class SkippableRoutesMiddleware(RoutesMiddleware):
71 """ Routes middleware that allows you to skip prefixes """
71 """ Routes middleware that allows you to skip prefixes """
72
72
73 def __init__(self, *args, **kw):
73 def __init__(self, *args, **kw):
74 self.skip_prefixes = kw.pop('skip_prefixes', [])
74 self.skip_prefixes = kw.pop('skip_prefixes', [])
75 super(SkippableRoutesMiddleware, self).__init__(*args, **kw)
75 super(SkippableRoutesMiddleware, self).__init__(*args, **kw)
76
76
77 def __call__(self, environ, start_response):
77 def __call__(self, environ, start_response):
78 for prefix in self.skip_prefixes:
78 for prefix in self.skip_prefixes:
79 if environ['PATH_INFO'].startswith(prefix):
79 if environ['PATH_INFO'].startswith(prefix):
80 # added to avoid the case when a missing /_static route falls
80 # added to avoid the case when a missing /_static route falls
81 # through to pylons and causes an exception as pylons is
81 # through to pylons and causes an exception as pylons is
82 # expecting wsgiorg.routing_args to be set in the environ
82 # expecting wsgiorg.routing_args to be set in the environ
83 # by RoutesMiddleware.
83 # by RoutesMiddleware.
84 if 'wsgiorg.routing_args' not in environ:
84 if 'wsgiorg.routing_args' not in environ:
85 environ['wsgiorg.routing_args'] = (None, {})
85 environ['wsgiorg.routing_args'] = (None, {})
86 return self.app(environ, start_response)
86 return self.app(environ, start_response)
87
87
88 return super(SkippableRoutesMiddleware, self).__call__(
88 return super(SkippableRoutesMiddleware, self).__call__(
89 environ, start_response)
89 environ, start_response)
90
90
91
91
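A minimal usage sketch for SkippableRoutesMiddleware as defined above (illustrative only, not part of this changeset; the mapper routes, the prefix values and the stand-in WSGI application are assumptions, and the positional arguments follow the routes.middleware.RoutesMiddleware interface):

    from routes import Mapper

    mapper = Mapper()
    mapper.connect('home', '/', controller='home', action='index')

    def plain_wsgi_app(environ, start_response):
        # stand-in for the downstream application (e.g. PylonsApp)
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['ok']

    # Requests whose PATH_INFO starts with a skipped prefix bypass the route
    # lookup entirely and are handed straight to the wrapped application.
    app = SkippableRoutesMiddleware(
        plain_wsgi_app, mapper, skip_prefixes=['/_static', '/_debug_toolbar'])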
92 def make_app(global_conf, static_files=True, **app_conf):
92 def make_app(global_conf, static_files=True, **app_conf):
93 """Create a Pylons WSGI application and return it
93 """Create a Pylons WSGI application and return it
94
94
95 ``global_conf``
95 ``global_conf``
96 The inherited configuration for this application. Normally from
96 The inherited configuration for this application. Normally from
97 the [DEFAULT] section of the Paste ini file.
97 the [DEFAULT] section of the Paste ini file.
98
98
99 ``app_conf``
99 ``app_conf``
100 The application's local configuration. Normally specified in
100 The application's local configuration. Normally specified in
101 the [app:<name>] section of the Paste ini file (where <name>
101 the [app:<name>] section of the Paste ini file (where <name>
102 defaults to main).
102 defaults to main).
103
103
104 """
104 """
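A hedged sketch of how this factory is normally driven (illustrative, not part of this changeset; the ini path is hypothetical): PasteDeploy reads the [DEFAULT] section into ``global_conf`` and the [app:main] section into ``app_conf``, then calls ``make_app(global_conf, **app_conf)``:

    from paste.deploy import loadapp

    # 'config:' URIs point PasteDeploy at an ini file; name selects the
    # [app:<name>] section and defaults to "main".
    wsgi_app = loadapp('config:/path/to/rhodecode.ini', name='main')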
105 # Apply compatibility patches
105 # Apply compatibility patches
106 patches.kombu_1_5_1_python_2_7_11()
106 patches.kombu_1_5_1_python_2_7_11()
107 patches.inspect_getargspec()
107 patches.inspect_getargspec()
108
108
109 # Configure the Pylons environment
109 # Configure the Pylons environment
110 config = load_environment(global_conf, app_conf)
110 config = load_environment(global_conf, app_conf)
111
111
112 # The Pylons WSGI app
112 # The Pylons WSGI app
113 app = PylonsApp(config=config)
113 app = PylonsApp(config=config)
114 if rhodecode.is_test:
115 app = csrf.CSRFDetector(app)
116
117 expected_origin = config.get('expected_origin')
118 if expected_origin:
119 # The API can be accessed from other Origins.
120 app = csrf.OriginChecker(app, expected_origin,
121 skip_urls=[routes.util.url_for('api')])
122
114
123 # Establish the Registry for this application
115 # Establish the Registry for this application
124 app = RegistryManager(app)
116 app = RegistryManager(app)
125
117
126 app.config = config
118 app.config = config
127
119
128 return app
120 return app
129
121
130
122
131 def make_pyramid_app(global_config, **settings):
123 def make_pyramid_app(global_config, **settings):
132 """
124 """
133 Constructs the WSGI application based on Pyramid and wraps the Pylons based
125 Constructs the WSGI application based on Pyramid and wraps the Pylons based
134 application.
126 application.
135
127
136 Specials:
128 Specials:
137
129
138 * We migrate from Pylons to Pyramid. While doing this, we keep both
130 * We migrate from Pylons to Pyramid. While doing this, we keep both
139 frameworks functional. This involves moving some WSGI middlewares around
131 frameworks functional. This involves moving some WSGI middlewares around
140 and providing access to some data internals, so that the old code is
132 and providing access to some data internals, so that the old code is
141 still functional.
133 still functional.
142
134
143 * The application can also be integrated like a plugin via the call to
135 * The application can also be integrated like a plugin via the call to
144 `includeme`. This is accompanied by the other utility functions which
136 `includeme`. This is accompanied by the other utility functions which
145 are called. Changing this should be done with great care to not break
137 are called. Changing this should be done with great care to not break
146 cases when these fragments are assembled from another place.
138 cases when these fragments are assembled from another place.
147
139
148 """
140 """
149 # The edition string should be available in pylons too, so we add it here
141 # The edition string should be available in pylons too, so we add it here
150 # before copying the settings.
142 # before copying the settings.
151 settings.setdefault('rhodecode.edition', 'Community Edition')
143 settings.setdefault('rhodecode.edition', 'Community Edition')
152
144
153 # As long as our Pylons application does expect "unprepared" settings, make
145 # As long as our Pylons application does expect "unprepared" settings, make
154 # sure that we keep an unmodified copy. This avoids unintentional change of
146 # sure that we keep an unmodified copy. This avoids unintentional change of
155 # behavior in the old application.
147 # behavior in the old application.
156 settings_pylons = settings.copy()
148 settings_pylons = settings.copy()
157
149
158 sanitize_settings_and_apply_defaults(settings)
150 sanitize_settings_and_apply_defaults(settings)
159 config = Configurator(settings=settings)
151 config = Configurator(settings=settings)
160 add_pylons_compat_data(config.registry, global_config, settings_pylons)
152 add_pylons_compat_data(config.registry, global_config, settings_pylons)
161
153
162 load_pyramid_environment(global_config, settings)
154 load_pyramid_environment(global_config, settings)
163
155
164 includeme_first(config)
156 includeme_first(config)
165 includeme(config)
157 includeme(config)
166 pyramid_app = config.make_wsgi_app()
158 pyramid_app = config.make_wsgi_app()
167 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
159 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
168 pyramid_app.config = config
160 pyramid_app.config = config
169
161
170 # creating the app uses a connection - return it after we are done
162 # creating the app uses a connection - return it after we are done
171 meta.Session.remove()
163 meta.Session.remove()
172
164
173 return pyramid_app
165 return pyramid_app
174
166
175
167
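# A hedged sketch of the plugin-style integration mentioned in the docstring
# above: a hosting Pyramid application could assemble the same fragments
# itself. The settings handling here is illustrative; a full setup would also
# call load_pyramid_environment and the outer WSGI wrappers.
def _example_embed_in_host_app(global_config, settings):
    from pyramid.config import Configurator

    settings_pylons = settings.copy()            # keep an unprepared copy
    sanitize_settings_and_apply_defaults(settings)
    config = Configurator(settings=settings)
    add_pylons_compat_data(config.registry, global_config, settings_pylons)
    includeme_first(config)   # favicon/robots/static views registered first
    includeme(config)         # the main RhodeCode wiring
    return config.make_wsgi_app()
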
176 def make_not_found_view(config):
168 def make_not_found_view(config):
177 """
169 """
178 This creates the view which should be registered as not-found-view to
170 This creates the view which should be registered as not-found-view to
179 pyramid. Basically it consists of the old pylons app, converted to a view.
171 pyramid. Basically it consists of the old pylons app, converted to a view.
180 Additionally it is wrapped by some other middlewares.
172 Additionally it is wrapped by some other middlewares.
181 """
173 """
182 settings = config.registry.settings
174 settings = config.registry.settings
183 vcs_server_enabled = settings['vcs.server.enable']
175 vcs_server_enabled = settings['vcs.server.enable']
184
176
185 # Make pylons app from unprepared settings.
177 # Make pylons app from unprepared settings.
186 pylons_app = make_app(
178 pylons_app = make_app(
187 config.registry._pylons_compat_global_config,
179 config.registry._pylons_compat_global_config,
188 **config.registry._pylons_compat_settings)
180 **config.registry._pylons_compat_settings)
189 config.registry._pylons_compat_config = pylons_app.config
181 config.registry._pylons_compat_config = pylons_app.config
190
182
191 # Appenlight monitoring.
183 # Appenlight monitoring.
192 pylons_app, appenlight_client = wrap_in_appenlight_if_enabled(
184 pylons_app, appenlight_client = wrap_in_appenlight_if_enabled(
193 pylons_app, settings)
185 pylons_app, settings)
194
186
195 # The pylons app is executed inside of the pyramid 404 exception handler.
187 # The pylons app is executed inside of the pyramid 404 exception handler.
196 # Exceptions which are raised inside of it are not handled by pyramid
188 # Exceptions which are raised inside of it are not handled by pyramid
197 # again. Therefore we add a middleware that invokes the error handler in
189 # again. Therefore we add a middleware that invokes the error handler in
198 # case of an exception or error response. This way we return proper error
190 # case of an exception or error response. This way we return proper error
199 # HTML pages in case of an error.
191 # HTML pages in case of an error.
200 reraise = (settings.get('debugtoolbar.enabled', False) or
192 reraise = (settings.get('debugtoolbar.enabled', False) or
201 rhodecode.disable_error_handler)
193 rhodecode.disable_error_handler)
202 pylons_app = PylonsErrorHandlingMiddleware(
194 pylons_app = PylonsErrorHandlingMiddleware(
203 pylons_app, error_handler, reraise)
195 pylons_app, error_handler, reraise)
204
196
205 # The VCSMiddleware shall operate like a fallback if pyramid doesn't find a
197 # The VCSMiddleware shall operate like a fallback if pyramid doesn't find a
206 # view to handle the request. Therefore it is wrapped around the pylons
198 # view to handle the request. Therefore it is wrapped around the pylons
207 # app. It has to be outside of the error handling, otherwise error responses
199 # app. It has to be outside of the error handling, otherwise error responses
208 # from the vcsserver are converted to HTML error pages. This confuses the
200 # from the vcsserver are converted to HTML error pages. This confuses the
209 # command line tools and the user won't get a meaningful error message.
201 # command line tools and the user won't get a meaningful error message.
210 if vcs_server_enabled:
202 if vcs_server_enabled:
211 pylons_app = VCSMiddleware(
203 pylons_app = VCSMiddleware(
212 pylons_app, settings, appenlight_client, registry=config.registry)
204 pylons_app, settings, appenlight_client, registry=config.registry)
213
205
214 # Convert WSGI app to pyramid view and return it.
206 # Convert WSGI app to pyramid view and return it.
215 return wsgiapp(pylons_app)
207 return wsgiapp(pylons_app)
216
208
217
209
218 def add_pylons_compat_data(registry, global_config, settings):
210 def add_pylons_compat_data(registry, global_config, settings):
219 """
211 """
220 Attach data to the registry to support the Pylons integration.
212 Attach data to the registry to support the Pylons integration.
221 """
213 """
222 registry._pylons_compat_global_config = global_config
214 registry._pylons_compat_global_config = global_config
223 registry._pylons_compat_settings = settings
215 registry._pylons_compat_settings = settings
224
216
225
217
226 def error_handler(exception, request):
218 def error_handler(exception, request):
227 import rhodecode
219 import rhodecode
228 from rhodecode.lib import helpers
220 from rhodecode.lib import helpers
229
221
230 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
222 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
231
223
232 base_response = HTTPInternalServerError()
224 base_response = HTTPInternalServerError()
233 # prefer original exception for the response since it may have headers set
225 # prefer original exception for the response since it may have headers set
234 if isinstance(exception, HTTPException):
226 if isinstance(exception, HTTPException):
235 base_response = exception
227 base_response = exception
236 elif isinstance(exception, VCSCommunicationError):
228 elif isinstance(exception, VCSCommunicationError):
237 base_response = VCSServerUnavailable()
229 base_response = VCSServerUnavailable()
238
230
239 def is_http_error(response):
231 def is_http_error(response):
240 # error which should have traceback
232 # error which should have traceback
241 return response.status_code > 499
233 return response.status_code > 499
242
234
243 if is_http_error(base_response):
235 if is_http_error(base_response):
244 log.exception(
236 log.exception(
245 'error occurred handling this request for path: %s', request.path)
237 'error occurred handling this request for path: %s', request.path)
246
238
247 c = AttributeDict()
239 c = AttributeDict()
248 c.error_message = base_response.status
240 c.error_message = base_response.status
249 c.error_explanation = base_response.explanation or str(base_response)
241 c.error_explanation = base_response.explanation or str(base_response)
250 c.visual = AttributeDict()
242 c.visual = AttributeDict()
251
243
252 c.visual.rhodecode_support_url = (
244 c.visual.rhodecode_support_url = (
253 request.registry.settings.get('rhodecode_support_url') or
245 request.registry.settings.get('rhodecode_support_url') or
254 request.route_url('rhodecode_support')
246 request.route_url('rhodecode_support')
255 )
247 )
256 c.redirect_time = 0
248 c.redirect_time = 0
257 c.rhodecode_name = rhodecode_title
249 c.rhodecode_name = rhodecode_title
258 if not c.rhodecode_name:
250 if not c.rhodecode_name:
259 c.rhodecode_name = 'Rhodecode'
251 c.rhodecode_name = 'Rhodecode'
260
252
261 c.causes = []
253 c.causes = []
262 if hasattr(base_response, 'causes'):
254 if hasattr(base_response, 'causes'):
263 c.causes = base_response.causes
255 c.causes = base_response.causes
264 c.messages = helpers.flash.pop_messages()
256 c.messages = helpers.flash.pop_messages()
265
257
266 response = render_to_response(
258 response = render_to_response(
267 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
259 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
268 response=base_response)
260 response=base_response)
269
261
270 return response
262 return response
271
263
272
264
273 def includeme(config):
265 def includeme(config):
274 settings = config.registry.settings
266 settings = config.registry.settings
275
267
276 # plugin information
268 # plugin information
277 config.registry.rhodecode_plugins = OrderedDict()
269 config.registry.rhodecode_plugins = OrderedDict()
278
270
279 config.add_directive(
271 config.add_directive(
280 'register_rhodecode_plugin', register_rhodecode_plugin)
272 'register_rhodecode_plugin', register_rhodecode_plugin)
281
273
282 if asbool(settings.get('appenlight', 'false')):
274 if asbool(settings.get('appenlight', 'false')):
283 config.include('appenlight_client.ext.pyramid_tween')
275 config.include('appenlight_client.ext.pyramid_tween')
284
276
285 # Includes which are required. The application would fail without them.
277 # Includes which are required. The application would fail without them.
286 config.include('pyramid_mako')
278 config.include('pyramid_mako')
287 config.include('pyramid_beaker')
279 config.include('pyramid_beaker')
288
280
289 config.include('rhodecode.authentication')
281 config.include('rhodecode.authentication')
290 config.include('rhodecode.integrations')
282 config.include('rhodecode.integrations')
291
283
292 # apps
284 # apps
293 config.include('rhodecode.apps._base')
285 config.include('rhodecode.apps._base')
294 config.include('rhodecode.apps.ops')
286 config.include('rhodecode.apps.ops')
295
287
296 config.include('rhodecode.apps.admin')
288 config.include('rhodecode.apps.admin')
297 config.include('rhodecode.apps.channelstream')
289 config.include('rhodecode.apps.channelstream')
298 config.include('rhodecode.apps.login')
290 config.include('rhodecode.apps.login')
299 config.include('rhodecode.apps.home')
291 config.include('rhodecode.apps.home')
300 config.include('rhodecode.apps.repository')
292 config.include('rhodecode.apps.repository')
301 config.include('rhodecode.apps.repo_group')
293 config.include('rhodecode.apps.repo_group')
302 config.include('rhodecode.apps.search')
294 config.include('rhodecode.apps.search')
303 config.include('rhodecode.apps.user_profile')
295 config.include('rhodecode.apps.user_profile')
304 config.include('rhodecode.apps.my_account')
296 config.include('rhodecode.apps.my_account')
305 config.include('rhodecode.apps.svn_support')
297 config.include('rhodecode.apps.svn_support')
306 config.include('rhodecode.apps.gist')
298 config.include('rhodecode.apps.gist')
307
299
308 config.include('rhodecode.apps.debug_style')
300 config.include('rhodecode.apps.debug_style')
309 config.include('rhodecode.tweens')
301 config.include('rhodecode.tweens')
310 config.include('rhodecode.api')
302 config.include('rhodecode.api')
311
303
312 config.add_route(
304 config.add_route(
313 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
305 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
314
306
315 config.add_translation_dirs('rhodecode:i18n/')
307 config.add_translation_dirs('rhodecode:i18n/')
316 settings['default_locale_name'] = settings.get('lang', 'en')
308 settings['default_locale_name'] = settings.get('lang', 'en')
317
309
318 # Add subscribers.
310 # Add subscribers.
319 config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
311 config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
320 config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
312 config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
321 config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)
313 config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)
322
314
323 config.add_request_method(
315 config.add_request_method(
324 'rhodecode.lib.partial_renderer.get_partial_renderer',
316 'rhodecode.lib.partial_renderer.get_partial_renderer',
325 'get_partial_renderer')
317 'get_partial_renderer')
326
318
327 # events
319 # events
328 # TODO(marcink): this should be done when pyramid migration is finished
320 # TODO(marcink): this should be done when pyramid migration is finished
329 # config.add_subscriber(
321 # config.add_subscriber(
330 # 'rhodecode.integrations.integrations_event_handler',
322 # 'rhodecode.integrations.integrations_event_handler',
331 # 'rhodecode.events.RhodecodeEvent')
323 # 'rhodecode.events.RhodecodeEvent')
332
324
333 # Set the authorization policy.
325 # Set the authorization policy.
334 authz_policy = ACLAuthorizationPolicy()
326 authz_policy = ACLAuthorizationPolicy()
335 config.set_authorization_policy(authz_policy)
327 config.set_authorization_policy(authz_policy)
336
328
337 # Set the default renderer for HTML templates to mako.
329 # Set the default renderer for HTML templates to mako.
338 config.add_mako_renderer('.html')
330 config.add_mako_renderer('.html')
339
331
340 config.add_renderer(
332 config.add_renderer(
341 name='json_ext',
333 name='json_ext',
342 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
334 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
343
335
344 # include RhodeCode plugins
336 # include RhodeCode plugins
345 includes = aslist(settings.get('rhodecode.includes', []))
337 includes = aslist(settings.get('rhodecode.includes', []))
346 for inc in includes:
338 for inc in includes:
347 config.include(inc)
339 config.include(inc)
348
340
349 # This is the glue which allows us to migrate in chunks. By registering the
341 # This is the glue which allows us to migrate in chunks. By registering the
350 # pylons based application as the "Not Found" view in Pyramid, we will
342 # pylons based application as the "Not Found" view in Pyramid, we will
351 # fall back to the old application each time the new one does not yet know
343 # fall back to the old application each time the new one does not yet know
352 # how to handle a request.
344 # how to handle a request.
353 config.add_notfound_view(make_not_found_view(config))
345 config.add_notfound_view(make_not_found_view(config))
354
346
355 if not settings.get('debugtoolbar.enabled', False):
347 if not settings.get('debugtoolbar.enabled', False):
356 # if no toolbar, then any exception gets caught and rendered
348 # if no toolbar, then any exception gets caught and rendered
357 config.add_view(error_handler, context=Exception)
349 config.add_view(error_handler, context=Exception)
358
350
359 config.add_view(error_handler, context=HTTPError)
351 config.add_view(error_handler, context=HTTPError)
360
352
361
353
362 def includeme_first(config):
354 def includeme_first(config):
363 # redirect automatic browser favicon.ico requests to correct place
355 # redirect automatic browser favicon.ico requests to correct place
364 def favicon_redirect(context, request):
356 def favicon_redirect(context, request):
365 return HTTPFound(
357 return HTTPFound(
366 request.static_path('rhodecode:public/images/favicon.ico'))
358 request.static_path('rhodecode:public/images/favicon.ico'))
367
359
368 config.add_view(favicon_redirect, route_name='favicon')
360 config.add_view(favicon_redirect, route_name='favicon')
369 config.add_route('favicon', '/favicon.ico')
361 config.add_route('favicon', '/favicon.ico')
370
362
371 def robots_redirect(context, request):
363 def robots_redirect(context, request):
372 return HTTPFound(
364 return HTTPFound(
373 request.static_path('rhodecode:public/robots.txt'))
365 request.static_path('rhodecode:public/robots.txt'))
374
366
375 config.add_view(robots_redirect, route_name='robots')
367 config.add_view(robots_redirect, route_name='robots')
376 config.add_route('robots', '/robots.txt')
368 config.add_route('robots', '/robots.txt')
377
369
378 config.add_static_view(
370 config.add_static_view(
379 '_static/deform', 'deform:static')
371 '_static/deform', 'deform:static')
380 config.add_static_view(
372 config.add_static_view(
381 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
373 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
382
374
383
375
384 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
376 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
385 """
377 """
386 Apply outer WSGI middlewares around the application.
378 Apply outer WSGI middlewares around the application.
387
379
388 Part of this has been moved up from the Pylons layer, so that the
380 Part of this has been moved up from the Pylons layer, so that the
389 data is also available if old Pylons code is hit through an already ported
381 data is also available if old Pylons code is hit through an already ported
390 view.
382 view.
391 """
383 """
392 settings = config.registry.settings
384 settings = config.registry.settings
393
385
394 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
386 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
395 pyramid_app = HttpsFixup(pyramid_app, settings)
387 pyramid_app = HttpsFixup(pyramid_app, settings)
396
388
397 # Add RoutesMiddleware to support the pylons compatibility tween during
389 # Add RoutesMiddleware to support the pylons compatibility tween during
398 # migration to pyramid.
390 # migration to pyramid.
399 pyramid_app = SkippableRoutesMiddleware(
391 pyramid_app = SkippableRoutesMiddleware(
400 pyramid_app, config.registry._pylons_compat_config['routes.map'],
392 pyramid_app, config.registry._pylons_compat_config['routes.map'],
401 skip_prefixes=(STATIC_FILE_PREFIX, '/_debug_toolbar'))
393 skip_prefixes=(STATIC_FILE_PREFIX, '/_debug_toolbar'))
402
394
403 pyramid_app, _ = wrap_in_appenlight_if_enabled(pyramid_app, settings)
395 pyramid_app, _ = wrap_in_appenlight_if_enabled(pyramid_app, settings)
404
396
405 if settings['gzip_responses']:
397 if settings['gzip_responses']:
406 pyramid_app = make_gzip_middleware(
398 pyramid_app = make_gzip_middleware(
407 pyramid_app, settings, compress_level=1)
399 pyramid_app, settings, compress_level=1)
408
400
409 # this should be the outermost middleware in the wsgi stack since
401 # this should be the outermost middleware in the wsgi stack since
410 # middlewares like Routes make database calls
402 # middlewares like Routes make database calls
411 def pyramid_app_with_cleanup(environ, start_response):
403 def pyramid_app_with_cleanup(environ, start_response):
412 try:
404 try:
413 return pyramid_app(environ, start_response)
405 return pyramid_app(environ, start_response)
414 finally:
406 finally:
415 # Dispose current database session and rollback uncommitted
407 # Dispose current database session and rollback uncommitted
416 # transactions.
408 # transactions.
417 meta.Session.remove()
409 meta.Session.remove()
418
410
419 # In a single-threaded server, on a non-sqlite db we should have
411 # In a single-threaded server, on a non-sqlite db we should have
420 # '0 Current Checked out connections' at the end of a request,
412 # '0 Current Checked out connections' at the end of a request,
421 # if not, then something, somewhere is leaving a connection open
413 # if not, then something, somewhere is leaving a connection open
422 pool = meta.Base.metadata.bind.engine.pool
414 pool = meta.Base.metadata.bind.engine.pool
423 log.debug('sa pool status: %s', pool.status())
415 log.debug('sa pool status: %s', pool.status())
424
416
425 return pyramid_app_with_cleanup
417 return pyramid_app_with_cleanup
426
418
427
419
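# The same "cleanup as the outermost layer" pattern, reduced to a
# self-contained sketch; `cleanup` stands in for meta.Session.remove and the
# names are illustrative only.
def _example_with_cleanup(wsgi_app, cleanup):
    def wrapper(environ, start_response):
        try:
            return wsgi_app(environ, start_response)
        finally:
            # runs even if the wrapped app raised, so per-request resources
            # (e.g. a database session) are always released
            cleanup()
    return wrapper
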
428 def sanitize_settings_and_apply_defaults(settings):
420 def sanitize_settings_and_apply_defaults(settings):
429 """
421 """
430 Applies settings defaults and does all type conversion.
422 Applies settings defaults and does all type conversion.
431
423
432 Eventually all settings parsing and preparation should move into this place,
424 Eventually all settings parsing and preparation should move into this place,
433 so that only one spot deals with this part. The remaining parts of the
425 so that only one spot deals with this part. The remaining parts of the
434 application can then rely fully on well-prepared settings.
426 application can then rely fully on well-prepared settings.
435
427
436 This piece would later be split up per topic to avoid a big fat monster
428 This piece would later be split up per topic to avoid a big fat monster
437 function.
429 function.
438 """
430 """
439
431
440 # Pyramid's mako renderer has to search in the templates folder so that the
432 # Pyramid's mako renderer has to search in the templates folder so that the
441 # old templates still work. Ported and new templates are expected to use
433 # old templates still work. Ported and new templates are expected to use
442 # real asset specifications for the includes.
434 # real asset specifications for the includes.
443 mako_directories = settings.setdefault('mako.directories', [
435 mako_directories = settings.setdefault('mako.directories', [
444 # Base templates of the original Pylons application
436 # Base templates of the original Pylons application
445 'rhodecode:templates',
437 'rhodecode:templates',
446 ])
438 ])
447 log.debug(
439 log.debug(
448 "Using the following Mako template directories: %s",
440 "Using the following Mako template directories: %s",
449 mako_directories)
441 mako_directories)
450
442
451 # Default includes, possible to change as a user
443 # Default includes, possible to change as a user
452 pyramid_includes = settings.setdefault('pyramid.includes', [
444 pyramid_includes = settings.setdefault('pyramid.includes', [
453 'rhodecode.lib.middleware.request_wrapper',
445 'rhodecode.lib.middleware.request_wrapper',
454 ])
446 ])
455 log.debug(
447 log.debug(
456 "Using the following pyramid.includes: %s",
448 "Using the following pyramid.includes: %s",
457 pyramid_includes)
449 pyramid_includes)
458
450
459 # TODO: johbo: Re-think this, usually the call to config.include
451 # TODO: johbo: Re-think this, usually the call to config.include
460 # should allow passing in a prefix.
452 # should allow passing in a prefix.
461 settings.setdefault('rhodecode.api.url', '/_admin/api')
453 settings.setdefault('rhodecode.api.url', '/_admin/api')
462
454
463 # Sanitize generic settings.
455 # Sanitize generic settings.
464 _list_setting(settings, 'default_encoding', 'UTF-8')
456 _list_setting(settings, 'default_encoding', 'UTF-8')
465 _bool_setting(settings, 'is_test', 'false')
457 _bool_setting(settings, 'is_test', 'false')
466 _bool_setting(settings, 'gzip_responses', 'false')
458 _bool_setting(settings, 'gzip_responses', 'false')
467
459
468 # Call split out functions that sanitize settings for each topic.
460 # Call split out functions that sanitize settings for each topic.
469 _sanitize_appenlight_settings(settings)
461 _sanitize_appenlight_settings(settings)
470 _sanitize_vcs_settings(settings)
462 _sanitize_vcs_settings(settings)
471
463
472 return settings
464 return settings
473
465
474
466
475 def _sanitize_appenlight_settings(settings):
467 def _sanitize_appenlight_settings(settings):
476 _bool_setting(settings, 'appenlight', 'false')
468 _bool_setting(settings, 'appenlight', 'false')
477
469
478
470
479 def _sanitize_vcs_settings(settings):
471 def _sanitize_vcs_settings(settings):
480 """
472 """
481 Applies settings defaults and does type conversion for all VCS related
473 Applies settings defaults and does type conversion for all VCS related
482 settings.
474 settings.
483 """
475 """
484 _string_setting(settings, 'vcs.svn.compatible_version', '')
476 _string_setting(settings, 'vcs.svn.compatible_version', '')
485 _string_setting(settings, 'git_rev_filter', '--all')
477 _string_setting(settings, 'git_rev_filter', '--all')
486 _string_setting(settings, 'vcs.hooks.protocol', 'http')
478 _string_setting(settings, 'vcs.hooks.protocol', 'http')
487 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
479 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
488 _string_setting(settings, 'vcs.server', '')
480 _string_setting(settings, 'vcs.server', '')
489 _string_setting(settings, 'vcs.server.log_level', 'debug')
481 _string_setting(settings, 'vcs.server.log_level', 'debug')
490 _string_setting(settings, 'vcs.server.protocol', 'http')
482 _string_setting(settings, 'vcs.server.protocol', 'http')
491 _bool_setting(settings, 'startup.import_repos', 'false')
483 _bool_setting(settings, 'startup.import_repos', 'false')
492 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
484 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
493 _bool_setting(settings, 'vcs.server.enable', 'true')
485 _bool_setting(settings, 'vcs.server.enable', 'true')
494 _bool_setting(settings, 'vcs.start_server', 'false')
486 _bool_setting(settings, 'vcs.start_server', 'false')
495 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
487 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
496 _int_setting(settings, 'vcs.connection_timeout', 3600)
488 _int_setting(settings, 'vcs.connection_timeout', 3600)
497
489
498 # Support legacy values of vcs.scm_app_implementation. Legacy
490 # Support legacy values of vcs.scm_app_implementation. Legacy
499 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
491 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
500 # which is now mapped to 'http'.
492 # which is now mapped to 'http'.
501 scm_app_impl = settings['vcs.scm_app_implementation']
493 scm_app_impl = settings['vcs.scm_app_implementation']
502 if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
494 if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
503 settings['vcs.scm_app_implementation'] = 'http'
495 settings['vcs.scm_app_implementation'] = 'http'
504
496
505
497
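# A hedged illustration of the legacy-value mapping above, run against a
# plain dict; real callers pass the full settings mapping.
def _example_legacy_scm_app_mapping():
    settings = {
        'vcs.scm_app_implementation':
            'rhodecode.lib.middleware.utils.scm_app_http',
    }
    _sanitize_vcs_settings(settings)
    # the legacy module path is mapped to the short 'http' value
    assert settings['vcs.scm_app_implementation'] == 'http'
    return settings
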
506 def _int_setting(settings, name, default):
498 def _int_setting(settings, name, default):
507 settings[name] = int(settings.get(name, default))
499 settings[name] = int(settings.get(name, default))
508
500
509
501
510 def _bool_setting(settings, name, default):
502 def _bool_setting(settings, name, default):
511 input = settings.get(name, default)
503 input = settings.get(name, default)
512 if isinstance(input, unicode):
504 if isinstance(input, unicode):
513 input = input.encode('utf8')
505 input = input.encode('utf8')
514 settings[name] = asbool(input)
506 settings[name] = asbool(input)
515
507
516
508
517 def _list_setting(settings, name, default):
509 def _list_setting(settings, name, default):
518 raw_value = settings.get(name, default)
510 raw_value = settings.get(name, default)
519
511
520 old_separator = ','
512 old_separator = ','
521 if old_separator in raw_value:
513 if old_separator in raw_value:
522 # If we get a comma separated list, pass it to our own function.
514 # If we get a comma separated list, pass it to our own function.
523 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
515 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
524 else:
516 else:
525 # Otherwise we assume it uses Pyramid's space/newline separation.
517 # Otherwise we assume it uses Pyramid's space/newline separation.
526 settings[name] = aslist(raw_value)
518 settings[name] = aslist(raw_value)
527
519
528
520
529 def _string_setting(settings, name, default, lower=True):
521 def _string_setting(settings, name, default, lower=True):
530 value = settings.get(name, default)
522 value = settings.get(name, default)
531 if lower:
523 if lower:
532 value = value.lower()
524 value = value.lower()
533 settings[name] = value
525 settings[name] = value
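# A short, hedged illustration of the helpers above on made-up values; the
# exact results follow the asbool/aslist/rhodecode_aslist semantics.
def _example_setting_helpers():
    settings = {
        'gzip_responses': 'True',
        'vcs.backends': 'hg, git, svn',         # legacy comma separated form
        'rhodecode.includes': 'plug_a plug_b',  # pyramid space separated form
        'lang': 'EN',
    }
    _bool_setting(settings, 'gzip_responses', 'false')   # -> True
    _list_setting(settings, 'vcs.backends', 'hg')        # -> ['hg', 'git', 'svn']
    _list_setting(settings, 'rhodecode.includes', '')    # -> ['plug_a', 'plug_b']
    _string_setting(settings, 'lang', 'en')              # -> 'en' (lower-cased)
    return settings
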
@@ -1,2023 +1,2026 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 authentication and permission libraries
22 authentication and permission libraries
23 """
23 """
24
24
25 import os
25 import os
26 import inspect
26 import inspect
27 import collections
27 import collections
28 import fnmatch
28 import fnmatch
29 import hashlib
29 import hashlib
30 import itertools
30 import itertools
31 import logging
31 import logging
32 import random
32 import random
33 import traceback
33 import traceback
34 from functools import wraps
34 from functools import wraps
35
35
36 import ipaddress
36 import ipaddress
37 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
37 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
38 from pylons.i18n.translation import _
38 from pylons.i18n.translation import _
39 # NOTE(marcink): this has to be removed only after pyramid migration,
39 # NOTE(marcink): this has to be removed only after pyramid migration,
40 # replace with _ = request.translate
40 # replace with _ = request.translate
41 from sqlalchemy.orm.exc import ObjectDeletedError
41 from sqlalchemy.orm.exc import ObjectDeletedError
42 from sqlalchemy.orm import joinedload
42 from sqlalchemy.orm import joinedload
43 from zope.cachedescriptors.property import Lazy as LazyProperty
43 from zope.cachedescriptors.property import Lazy as LazyProperty
44
44
45 import rhodecode
45 import rhodecode
46 from rhodecode.model import meta
46 from rhodecode.model import meta
47 from rhodecode.model.meta import Session
47 from rhodecode.model.meta import Session
48 from rhodecode.model.user import UserModel
48 from rhodecode.model.user import UserModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
50 User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
51 UserIpMap, UserApiKeys, RepoGroup)
51 UserIpMap, UserApiKeys, RepoGroup)
52 from rhodecode.lib import caches
52 from rhodecode.lib import caches
53 from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5
53 from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5
54 from rhodecode.lib.utils import (
54 from rhodecode.lib.utils import (
55 get_repo_slug, get_repo_group_slug, get_user_group_slug)
55 get_repo_slug, get_repo_group_slug, get_user_group_slug)
56 from rhodecode.lib.caching_query import FromCache
56 from rhodecode.lib.caching_query import FromCache
57
57
58
58
59 if rhodecode.is_unix:
59 if rhodecode.is_unix:
60 import bcrypt
60 import bcrypt
61
61
62 log = logging.getLogger(__name__)
62 log = logging.getLogger(__name__)
63
63
64 csrf_token_key = "csrf_token"
64 csrf_token_key = "csrf_token"
65
65
66
66
67 class PasswordGenerator(object):
67 class PasswordGenerator(object):
68 """
68 """
69 This is a simple class for generating passwords from different sets of
69 This is a simple class for generating passwords from different sets of
70 characters
70 characters
71 usage::
71 usage::
72
72
73 passwd_gen = PasswordGenerator()
73 passwd_gen = PasswordGenerator()
74 # print an 8-letter password containing only big and small letters
74 # print an 8-letter password containing only big and small letters
75 of the alphabet
75 of the alphabet
76 passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
76 passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
77 """
77 """
78 ALPHABETS_NUM = r'''1234567890'''
78 ALPHABETS_NUM = r'''1234567890'''
79 ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
79 ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
80 ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
80 ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
81 ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
81 ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
82 ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
82 ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
83 + ALPHABETS_NUM + ALPHABETS_SPECIAL
83 + ALPHABETS_NUM + ALPHABETS_SPECIAL
84 ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
84 ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
85 ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
85 ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
86 ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
86 ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
87 ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
87 ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
88
88
89 def __init__(self, passwd=''):
89 def __init__(self, passwd=''):
90 self.passwd = passwd
90 self.passwd = passwd
91
91
92 def gen_password(self, length, type_=None):
92 def gen_password(self, length, type_=None):
93 if type_ is None:
93 if type_ is None:
94 type_ = self.ALPHABETS_FULL
94 type_ = self.ALPHABETS_FULL
95 self.passwd = ''.join([random.choice(type_) for _ in xrange(length)])
95 self.passwd = ''.join([random.choice(type_) for _ in xrange(length)])
96 return self.passwd
96 return self.passwd
97
97
98
98
99 class _RhodeCodeCryptoBase(object):
99 class _RhodeCodeCryptoBase(object):
100 ENC_PREF = None
100 ENC_PREF = None
101
101
102 def hash_create(self, str_):
102 def hash_create(self, str_):
103 """
103 """
104 hash the string using
104 hash the string using
105
105
106 :param str_: password to hash
106 :param str_: password to hash
107 """
107 """
108 raise NotImplementedError
108 raise NotImplementedError
109
109
110 def hash_check_with_upgrade(self, password, hashed):
110 def hash_check_with_upgrade(self, password, hashed):
111 """
111 """
112 Returns a tuple whose first element is a boolean stating whether the
112 Returns a tuple whose first element is a boolean stating whether the
113 given password matches its hashed version, and whose second element is a
113 given password matches its hashed version, and whose second element is a
114 new hash of the password, in case this password should be migrated to a
114 new hash of the password, in case this password should be migrated to a
115 new cipher.
115 new cipher.
116 """
116 """
117 checked_hash = self.hash_check(password, hashed)
117 checked_hash = self.hash_check(password, hashed)
118 return checked_hash, None
118 return checked_hash, None
119
119
120 def hash_check(self, password, hashed):
120 def hash_check(self, password, hashed):
121 """
121 """
122 Checks whether the password matches its hashed value.
122 Checks whether the password matches its hashed value.
123
123
124 :param password: password
124 :param password: password
125 :param hashed: password in hashed form
125 :param hashed: password in hashed form
126 """
126 """
127 raise NotImplementedError
127 raise NotImplementedError
128
128
129 def _assert_bytes(self, value):
129 def _assert_bytes(self, value):
130 """
130 """
131 Passing in a `unicode` object can lead to hard-to-detect issues
131 Passing in a `unicode` object can lead to hard-to-detect issues
132 if passwords contain non-ascii characters. We do a type check at
132 if passwords contain non-ascii characters. We do a type check at
133 runtime, so that such mistakes are detected early on.
133 runtime, so that such mistakes are detected early on.
134 """
134 """
135 if not isinstance(value, str):
135 if not isinstance(value, str):
136 raise TypeError(
136 raise TypeError(
137 "Bytestring required as input, got %r." % (value, ))
137 "Bytestring required as input, got %r." % (value, ))
138
138
139
139
140 class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
140 class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
141 ENC_PREF = ('$2a$10', '$2b$10')
141 ENC_PREF = ('$2a$10', '$2b$10')
142
142
143 def hash_create(self, str_):
143 def hash_create(self, str_):
144 self._assert_bytes(str_)
144 self._assert_bytes(str_)
145 return bcrypt.hashpw(str_, bcrypt.gensalt(10))
145 return bcrypt.hashpw(str_, bcrypt.gensalt(10))
146
146
147 def hash_check_with_upgrade(self, password, hashed):
147 def hash_check_with_upgrade(self, password, hashed):
148 """
148 """
149 Returns a tuple whose first element is a boolean stating whether the
149 Returns a tuple whose first element is a boolean stating whether the
150 given password matches its hashed version, and whose second element is a
150 given password matches its hashed version, and whose second element is a
151 new hash of the password, in case this password should be migrated to a
151 new hash of the password, in case this password should be migrated to a
152 new cipher.
152 new cipher.
153
153
154 This implements special upgrade logic which works like this:
154 This implements special upgrade logic which works like this:
155 - check if the given password matches the bcrypted hash; if yes, the
155 - check if the given password matches the bcrypted hash; if yes, the
156 correct password was used and it was already stored as bcrypt. Proceed
156 correct password was used and it was already stored as bcrypt. Proceed
157 without any changes
157 without any changes
158 - if the bcrypt hash check does not match, try sha256. If that hash
158 - if the bcrypt hash check does not match, try sha256. If that hash
159 compare is ok, we are using the correct but old hashed password;
159 compare is ok, we are using the correct but old hashed password;
160 indicate a hash change and proceed
160 indicate a hash change and proceed
161 """
161 """
162
162
163 new_hash = None
163 new_hash = None
164
164
165 # regular pw check
165 # regular pw check
166 password_match_bcrypt = self.hash_check(password, hashed)
166 password_match_bcrypt = self.hash_check(password, hashed)
167
167
168 # now we want to know if the password was maybe from sha256
168 # now we want to know if the password was maybe from sha256
169 # basically calling _RhodeCodeCryptoSha256().hash_check()
169 # basically calling _RhodeCodeCryptoSha256().hash_check()
170 if not password_match_bcrypt:
170 if not password_match_bcrypt:
171 if _RhodeCodeCryptoSha256().hash_check(password, hashed):
171 if _RhodeCodeCryptoSha256().hash_check(password, hashed):
172 new_hash = self.hash_create(password) # make new bcrypt hash
172 new_hash = self.hash_create(password) # make new bcrypt hash
173 password_match_bcrypt = True
173 password_match_bcrypt = True
174
174
175 return password_match_bcrypt, new_hash
175 return password_match_bcrypt, new_hash
176
176
177 def hash_check(self, password, hashed):
177 def hash_check(self, password, hashed):
178 """
178 """
179 Checks whether the password matches its hashed value.
179 Checks whether the password matches its hashed value.
180
180
181 :param password: password
181 :param password: password
182 :param hashed: password in hashed form
182 :param hashed: password in hashed form
183 """
183 """
184 self._assert_bytes(password)
184 self._assert_bytes(password)
185 try:
185 try:
186 return bcrypt.hashpw(password, hashed) == hashed
186 return bcrypt.hashpw(password, hashed) == hashed
187 except ValueError as e:
187 except ValueError as e:
188 # we probably have an invalid salt here; we should not crash,
188 # we probably have an invalid salt here; we should not crash,
189 # just return False as it would be a wrong password.
189 # just return False as it would be a wrong password.
190 log.debug('Failed to check password hash using bcrypt %s',
190 log.debug('Failed to check password hash using bcrypt %s',
191 safe_str(e))
191 safe_str(e))
192
192
193 return False
193 return False
194
194
195
195
196 class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
196 class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
197 ENC_PREF = '_'
197 ENC_PREF = '_'
198
198
199 def hash_create(self, str_):
199 def hash_create(self, str_):
200 self._assert_bytes(str_)
200 self._assert_bytes(str_)
201 return hashlib.sha256(str_).hexdigest()
201 return hashlib.sha256(str_).hexdigest()
202
202
203 def hash_check(self, password, hashed):
203 def hash_check(self, password, hashed):
204 """
204 """
205 Checks whether the password matches its hashed value.
205 Checks whether the password matches its hashed value.
206
206
207 :param password: password
207 :param password: password
208 :param hashed: password in hashed form
208 :param hashed: password in hashed form
209 """
209 """
210 self._assert_bytes(password)
210 self._assert_bytes(password)
211 return hashlib.sha256(password).hexdigest() == hashed
211 return hashlib.sha256(password).hexdigest() == hashed
212
212
213
213
214 class _RhodeCodeCryptoMd5(_RhodeCodeCryptoBase):
214 class _RhodeCodeCryptoMd5(_RhodeCodeCryptoBase):
215 ENC_PREF = '_'
215 ENC_PREF = '_'
216
216
217 def hash_create(self, str_):
217 def hash_create(self, str_):
218 self._assert_bytes(str_)
218 self._assert_bytes(str_)
219 return hashlib.md5(str_).hexdigest()
219 return hashlib.md5(str_).hexdigest()
220
220
221 def hash_check(self, password, hashed):
221 def hash_check(self, password, hashed):
222 """
222 """
223 Checks whether the password matches its hashed value.
223 Checks whether the password matches its hashed value.
224
224
225 :param password: password
225 :param password: password
226 :param hashed: password in hashed form
226 :param hashed: password in hashed form
227 """
227 """
228 self._assert_bytes(password)
228 self._assert_bytes(password)
229 return hashlib.md5(password).hexdigest() == hashed
229 return hashlib.md5(password).hexdigest() == hashed
230
230
231
231
232 def crypto_backend():
232 def crypto_backend():
233 """
233 """
234 Return the matching crypto backend.
234 Return the matching crypto backend.
235
235
236 Selection is based on whether we run tests or not; we pick the md5
236 Selection is based on whether we run tests or not; we pick the md5
237 backend to make tests run faster, since bcrypt is expensive to calculate
237 backend to make tests run faster, since bcrypt is expensive to calculate
238 """
238 """
239 if rhodecode.is_test:
239 if rhodecode.is_test:
240 RhodeCodeCrypto = _RhodeCodeCryptoMd5()
240 RhodeCodeCrypto = _RhodeCodeCryptoMd5()
241 else:
241 else:
242 RhodeCodeCrypto = _RhodeCodeCryptoBCrypt()
242 RhodeCodeCrypto = _RhodeCodeCryptoBCrypt()
243
243
244 return RhodeCodeCrypto
244 return RhodeCodeCrypto
245
245
246
246
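# A hedged sketch of the sha256 -> bcrypt upgrade path described in
# _RhodeCodeCryptoBCrypt.hash_check_with_upgrade; it assumes the bcrypt
# library is available (non-test, unix setup).
def _example_password_hash_upgrade():
    legacy_hash = _RhodeCodeCryptoSha256().hash_create('old-password')
    matches, new_hash = _RhodeCodeCryptoBCrypt().hash_check_with_upgrade(
        'old-password', legacy_hash)
    # matches is True; new_hash holds a fresh bcrypt hash that should replace
    # the stored sha256 value (it stays None when no migration is needed).
    return matches, new_hash
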
247 def get_crypt_password(password):
247 def get_crypt_password(password):
248 """
248 """
249 Create the hash of `password` with the active crypto backend.
249 Create the hash of `password` with the active crypto backend.
250
250
251 :param password: The cleartext password.
251 :param password: The cleartext password.
252 :type password: unicode
252 :type password: unicode
253 """
253 """
254 password = safe_str(password)
254 password = safe_str(password)
255 return crypto_backend().hash_create(password)
255 return crypto_backend().hash_create(password)
256
256
257
257
258 def check_password(password, hashed):
258 def check_password(password, hashed):
259 """
259 """
260 Check if the value in `password` matches the hash in `hashed`.
260 Check if the value in `password` matches the hash in `hashed`.
261
261
262 :param password: The cleartext password.
262 :param password: The cleartext password.
263 :type password: unicode
263 :type password: unicode
264
264
265 :param hashed: The expected hashed version of the password.
265 :param hashed: The expected hashed version of the password.
266 :type hashed: The hash has to be passed in in text representation.
266 :type hashed: The hash has to be passed in in text representation.
267 """
267 """
268 password = safe_str(password)
268 password = safe_str(password)
269 return crypto_backend().hash_check(password, hashed)
269 return crypto_backend().hash_check(password, hashed)
270
270
271
271
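# A hedged round-trip sketch of the two helpers above; which backend actually
# hashes (md5 under tests, bcrypt otherwise) is decided by crypto_backend().
def _example_password_round_trip():
    hashed = get_crypt_password(u'secret')
    assert check_password(u'secret', hashed)
    assert not check_password(u'wrong', hashed)
    return hashed
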
272 def generate_auth_token(data, salt=None):
272 def generate_auth_token(data, salt=None):
273 """
273 """
274 Generates an API key from the given string
274 Generates an API key from the given string
275 """
275 """
276
276
277 if salt is None:
277 if salt is None:
278 salt = os.urandom(16)
278 salt = os.urandom(16)
279 return hashlib.sha1(safe_str(data) + salt).hexdigest()
279 return hashlib.sha1(safe_str(data) + salt).hexdigest()
280
280
281
281
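# A small, hedged illustration of the salt behaviour above: an explicit salt
# makes the token reproducible, while the default os.urandom(16) salt yields
# a different token on (practically) every call.
def _example_auth_token_salting():
    token_a = generate_auth_token('some-data', salt='fixed-salt')
    token_b = generate_auth_token('some-data', salt='fixed-salt')
    assert token_a == token_b   # explicit salt -> reproducible token
    return token_a
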
282 class CookieStoreWrapper(object):
282 class CookieStoreWrapper(object):
283
283
284 def __init__(self, cookie_store):
284 def __init__(self, cookie_store):
285 self.cookie_store = cookie_store
285 self.cookie_store = cookie_store
286
286
287 def __repr__(self):
287 def __repr__(self):
288 return 'CookieStore<%s>' % (self.cookie_store)
288 return 'CookieStore<%s>' % (self.cookie_store)
289
289
290 def get(self, key, other=None):
290 def get(self, key, other=None):
291 if isinstance(self.cookie_store, dict):
291 if isinstance(self.cookie_store, dict):
292 return self.cookie_store.get(key, other)
292 return self.cookie_store.get(key, other)
293 elif isinstance(self.cookie_store, AuthUser):
293 elif isinstance(self.cookie_store, AuthUser):
294 return self.cookie_store.__dict__.get(key, other)
294 return self.cookie_store.__dict__.get(key, other)
295
295
296
296
297 def _cached_perms_data(user_id, scope, user_is_admin,
297 def _cached_perms_data(user_id, scope, user_is_admin,
298 user_inherit_default_permissions, explicit, algo):
298 user_inherit_default_permissions, explicit, algo):
299
299
300 permissions = PermissionCalculator(
300 permissions = PermissionCalculator(
301 user_id, scope, user_is_admin, user_inherit_default_permissions,
301 user_id, scope, user_is_admin, user_inherit_default_permissions,
302 explicit, algo)
302 explicit, algo)
303 return permissions.calculate()
303 return permissions.calculate()
304
304
305
305
306 class PermOrigin(object):
306 class PermOrigin(object):
307 ADMIN = 'superadmin'
307 ADMIN = 'superadmin'
308
308
309 REPO_USER = 'user:%s'
309 REPO_USER = 'user:%s'
310 REPO_USERGROUP = 'usergroup:%s'
310 REPO_USERGROUP = 'usergroup:%s'
311 REPO_OWNER = 'repo.owner'
311 REPO_OWNER = 'repo.owner'
312 REPO_DEFAULT = 'repo.default'
312 REPO_DEFAULT = 'repo.default'
313 REPO_PRIVATE = 'repo.private'
313 REPO_PRIVATE = 'repo.private'
314
314
315 REPOGROUP_USER = 'user:%s'
315 REPOGROUP_USER = 'user:%s'
316 REPOGROUP_USERGROUP = 'usergroup:%s'
316 REPOGROUP_USERGROUP = 'usergroup:%s'
317 REPOGROUP_OWNER = 'group.owner'
317 REPOGROUP_OWNER = 'group.owner'
318 REPOGROUP_DEFAULT = 'group.default'
318 REPOGROUP_DEFAULT = 'group.default'
319
319
320 USERGROUP_USER = 'user:%s'
320 USERGROUP_USER = 'user:%s'
321 USERGROUP_USERGROUP = 'usergroup:%s'
321 USERGROUP_USERGROUP = 'usergroup:%s'
322 USERGROUP_OWNER = 'usergroup.owner'
322 USERGROUP_OWNER = 'usergroup.owner'
323 USERGROUP_DEFAULT = 'usergroup.default'
323 USERGROUP_DEFAULT = 'usergroup.default'
324
324
325
325
326 class PermOriginDict(dict):
326 class PermOriginDict(dict):
327 """
327 """
328 A special dict used for tracking permissions along with their origins.
328 A special dict used for tracking permissions along with their origins.
329
329
330 `__setitem__` has been overridden to expect a tuple(perm, origin)
330 `__setitem__` has been overridden to expect a tuple(perm, origin)
331 `__getitem__` will return only the perm
331 `__getitem__` will return only the perm
332 `.perm_origin_stack` will return the stack of (perm, origin) set per key
332 `.perm_origin_stack` will return the stack of (perm, origin) set per key
333
333
334 >>> perms = PermOriginDict()
334 >>> perms = PermOriginDict()
335 >>> perms['resource'] = 'read', 'default'
335 >>> perms['resource'] = 'read', 'default'
336 >>> perms['resource']
336 >>> perms['resource']
337 'read'
337 'read'
338 >>> perms['resource'] = 'write', 'admin'
338 >>> perms['resource'] = 'write', 'admin'
339 >>> perms['resource']
339 >>> perms['resource']
340 'write'
340 'write'
341 >>> perms.perm_origin_stack
341 >>> perms.perm_origin_stack
342 {'resource': [('read', 'default'), ('write', 'admin')]}
342 {'resource': [('read', 'default'), ('write', 'admin')]}
343 """
343 """
344
344
345 def __init__(self, *args, **kw):
345 def __init__(self, *args, **kw):
346 dict.__init__(self, *args, **kw)
346 dict.__init__(self, *args, **kw)
347 self.perm_origin_stack = {}
347 self.perm_origin_stack = {}
348
348
349 def __setitem__(self, key, (perm, origin)):
349 def __setitem__(self, key, (perm, origin)):
350 self.perm_origin_stack.setdefault(key, []).append((perm, origin))
350 self.perm_origin_stack.setdefault(key, []).append((perm, origin))
351 dict.__setitem__(self, key, perm)
351 dict.__setitem__(self, key, perm)
352
352
353
353
354 class PermissionCalculator(object):
354 class PermissionCalculator(object):
355
355
356 def __init__(
356 def __init__(
357 self, user_id, scope, user_is_admin,
357 self, user_id, scope, user_is_admin,
358 user_inherit_default_permissions, explicit, algo):
358 user_inherit_default_permissions, explicit, algo):
359 self.user_id = user_id
359 self.user_id = user_id
360 self.user_is_admin = user_is_admin
360 self.user_is_admin = user_is_admin
361 self.inherit_default_permissions = user_inherit_default_permissions
361 self.inherit_default_permissions = user_inherit_default_permissions
362 self.explicit = explicit
362 self.explicit = explicit
363 self.algo = algo
363 self.algo = algo
364
364
365 scope = scope or {}
365 scope = scope or {}
366 self.scope_repo_id = scope.get('repo_id')
366 self.scope_repo_id = scope.get('repo_id')
367 self.scope_repo_group_id = scope.get('repo_group_id')
367 self.scope_repo_group_id = scope.get('repo_group_id')
368 self.scope_user_group_id = scope.get('user_group_id')
368 self.scope_user_group_id = scope.get('user_group_id')
369
369
370 self.default_user_id = User.get_default_user(cache=True).user_id
370 self.default_user_id = User.get_default_user(cache=True).user_id
371
371
372 self.permissions_repositories = PermOriginDict()
372 self.permissions_repositories = PermOriginDict()
373 self.permissions_repository_groups = PermOriginDict()
373 self.permissions_repository_groups = PermOriginDict()
374 self.permissions_user_groups = PermOriginDict()
374 self.permissions_user_groups = PermOriginDict()
375 self.permissions_global = set()
375 self.permissions_global = set()
376
376
377 self.default_repo_perms = Permission.get_default_repo_perms(
377 self.default_repo_perms = Permission.get_default_repo_perms(
378 self.default_user_id, self.scope_repo_id)
378 self.default_user_id, self.scope_repo_id)
379 self.default_repo_groups_perms = Permission.get_default_group_perms(
379 self.default_repo_groups_perms = Permission.get_default_group_perms(
380 self.default_user_id, self.scope_repo_group_id)
380 self.default_user_id, self.scope_repo_group_id)
381 self.default_user_group_perms = \
381 self.default_user_group_perms = \
382 Permission.get_default_user_group_perms(
382 Permission.get_default_user_group_perms(
383 self.default_user_id, self.scope_user_group_id)
383 self.default_user_id, self.scope_user_group_id)
384
384
385 def calculate(self):
385 def calculate(self):
386 if self.user_is_admin:
386 if self.user_is_admin:
387 return self._admin_permissions()
387 return self._admin_permissions()
388
388
389 self._calculate_global_default_permissions()
389 self._calculate_global_default_permissions()
390 self._calculate_global_permissions()
390 self._calculate_global_permissions()
391 self._calculate_default_permissions()
391 self._calculate_default_permissions()
392 self._calculate_repository_permissions()
392 self._calculate_repository_permissions()
393 self._calculate_repository_group_permissions()
393 self._calculate_repository_group_permissions()
394 self._calculate_user_group_permissions()
394 self._calculate_user_group_permissions()
395 return self._permission_structure()
395 return self._permission_structure()
396
396
397 def _admin_permissions(self):
397 def _admin_permissions(self):
398 """
398 """
399 the admin user has all default rights for repositories
399 the admin user has all default rights for repositories
400 and groups set to admin
400 and groups set to admin
401 """
401 """
402 self.permissions_global.add('hg.admin')
402 self.permissions_global.add('hg.admin')
403 self.permissions_global.add('hg.create.write_on_repogroup.true')
403 self.permissions_global.add('hg.create.write_on_repogroup.true')
404
404
405 # repositories
405 # repositories
406 for perm in self.default_repo_perms:
406 for perm in self.default_repo_perms:
407 r_k = perm.UserRepoToPerm.repository.repo_name
407 r_k = perm.UserRepoToPerm.repository.repo_name
408 p = 'repository.admin'
408 p = 'repository.admin'
409 self.permissions_repositories[r_k] = p, PermOrigin.ADMIN
409 self.permissions_repositories[r_k] = p, PermOrigin.ADMIN
410
410
411 # repository groups
411 # repository groups
412 for perm in self.default_repo_groups_perms:
412 for perm in self.default_repo_groups_perms:
413 rg_k = perm.UserRepoGroupToPerm.group.group_name
413 rg_k = perm.UserRepoGroupToPerm.group.group_name
414 p = 'group.admin'
414 p = 'group.admin'
415 self.permissions_repository_groups[rg_k] = p, PermOrigin.ADMIN
415 self.permissions_repository_groups[rg_k] = p, PermOrigin.ADMIN
416
416
417 # user groups
417 # user groups
418 for perm in self.default_user_group_perms:
418 for perm in self.default_user_group_perms:
419 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
419 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
420 p = 'usergroup.admin'
420 p = 'usergroup.admin'
421 self.permissions_user_groups[u_k] = p, PermOrigin.ADMIN
421 self.permissions_user_groups[u_k] = p, PermOrigin.ADMIN
422
422
423 return self._permission_structure()
423 return self._permission_structure()
424
424
425 def _calculate_global_default_permissions(self):
425 def _calculate_global_default_permissions(self):
426 """
426 """
427 global permissions taken from the default user
427 global permissions taken from the default user
428 """
428 """
429 default_global_perms = UserToPerm.query()\
429 default_global_perms = UserToPerm.query()\
430 .filter(UserToPerm.user_id == self.default_user_id)\
430 .filter(UserToPerm.user_id == self.default_user_id)\
431 .options(joinedload(UserToPerm.permission))
431 .options(joinedload(UserToPerm.permission))
432
432
433 for perm in default_global_perms:
433 for perm in default_global_perms:
434 self.permissions_global.add(perm.permission.permission_name)
434 self.permissions_global.add(perm.permission.permission_name)
435
435
436 def _calculate_global_permissions(self):
436 def _calculate_global_permissions(self):
437 """
437 """
438 Set global system permissions with user permissions or permissions
438 Set global system permissions with user permissions or permissions
439 taken from the user groups of the current user.
439 taken from the user groups of the current user.
440
440
441 The permissions include repo creating, repo group creating, forking
441 The permissions include repo creating, repo group creating, forking
442 etc.
442 etc.
443 """
443 """
444
444
        # now we read the explicitly defined permissions and overwrite what
        # was set before; these can be configured on user groups or directly
        # on users.
447
447
448 # TODO: johbo: This seems to be out of sync, find out the reason
448 # TODO: johbo: This seems to be out of sync, find out the reason
449 # for the comment below and update it.
449 # for the comment below and update it.
450
450
451 # In case we want to extend this list we should be always in sync with
451 # In case we want to extend this list we should be always in sync with
452 # User.DEFAULT_USER_PERMISSIONS definitions
452 # User.DEFAULT_USER_PERMISSIONS definitions
453 _configurable = frozenset([
453 _configurable = frozenset([
454 'hg.fork.none', 'hg.fork.repository',
454 'hg.fork.none', 'hg.fork.repository',
455 'hg.create.none', 'hg.create.repository',
455 'hg.create.none', 'hg.create.repository',
456 'hg.usergroup.create.false', 'hg.usergroup.create.true',
456 'hg.usergroup.create.false', 'hg.usergroup.create.true',
457 'hg.repogroup.create.false', 'hg.repogroup.create.true',
457 'hg.repogroup.create.false', 'hg.repogroup.create.true',
458 'hg.create.write_on_repogroup.false',
458 'hg.create.write_on_repogroup.false',
459 'hg.create.write_on_repogroup.true',
459 'hg.create.write_on_repogroup.true',
460 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
460 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
461 ])
461 ])
462
462
        # USER GROUPS come first: read global permissions from user groups
464 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
464 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
465 .options(joinedload(UserGroupToPerm.permission))\
465 .options(joinedload(UserGroupToPerm.permission))\
466 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
466 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
467 UserGroupMember.users_group_id))\
467 UserGroupMember.users_group_id))\
468 .filter(UserGroupMember.user_id == self.user_id)\
468 .filter(UserGroupMember.user_id == self.user_id)\
469 .order_by(UserGroupToPerm.users_group_id)\
469 .order_by(UserGroupToPerm.users_group_id)\
470 .all()
470 .all()
471
471
        # group the results by user group, since the user can be a member of
        # more than one group and we need to consider all of them
474 _explicit_grouped_perms = [
474 _explicit_grouped_perms = [
475 [x, list(y)] for x, y in
475 [x, list(y)] for x, y in
476 itertools.groupby(user_perms_from_users_groups,
476 itertools.groupby(user_perms_from_users_groups,
477 lambda _x: _x.users_group)]
477 lambda _x: _x.users_group)]
478
478
479 for gr, perms in _explicit_grouped_perms:
479 for gr, perms in _explicit_grouped_perms:
480 # since user can be in multiple groups iterate over them and
480 # since user can be in multiple groups iterate over them and
481 # select the lowest permissions first (more explicit)
481 # select the lowest permissions first (more explicit)
482 # TODO: marcink: do this^^
482 # TODO: marcink: do this^^
483
483
484 # group doesn't inherit default permissions so we actually set them
484 # group doesn't inherit default permissions so we actually set them
485 if not gr.inherit_default_permissions:
485 if not gr.inherit_default_permissions:
486 # NEED TO IGNORE all previously set configurable permissions
486 # NEED TO IGNORE all previously set configurable permissions
487 # and replace them with explicitly set from this user
487 # and replace them with explicitly set from this user
488 # group permissions
488 # group permissions
489 self.permissions_global = self.permissions_global.difference(
489 self.permissions_global = self.permissions_global.difference(
490 _configurable)
490 _configurable)
491 for perm in perms:
491 for perm in perms:
492 self.permissions_global.add(perm.permission.permission_name)
492 self.permissions_global.add(perm.permission.permission_name)
493
493
494 # user explicit global permissions
494 # user explicit global permissions
495 user_perms = Session().query(UserToPerm)\
495 user_perms = Session().query(UserToPerm)\
496 .options(joinedload(UserToPerm.permission))\
496 .options(joinedload(UserToPerm.permission))\
497 .filter(UserToPerm.user_id == self.user_id).all()
497 .filter(UserToPerm.user_id == self.user_id).all()
498
498
499 if not self.inherit_default_permissions:
499 if not self.inherit_default_permissions:
500 # NEED TO IGNORE all configurable permissions and
500 # NEED TO IGNORE all configurable permissions and
501 # replace them with explicitly set from this user permissions
501 # replace them with explicitly set from this user permissions
502 self.permissions_global = self.permissions_global.difference(
502 self.permissions_global = self.permissions_global.difference(
503 _configurable)
503 _configurable)
504 for perm in user_perms:
504 for perm in user_perms:
505 self.permissions_global.add(perm.permission.permission_name)
505 self.permissions_global.add(perm.permission.permission_name)
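        # Illustrative example of the replacement logic above: if the global
        # set holds {'hg.create.repository', 'hg.fork.none'} from the defaults
        # and a non-inheriting user explicitly has 'hg.create.none', every
        # entry listed in _configurable is dropped first and 'hg.create.none'
        # is then added, so the explicit setting wins over the defaults.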
506
506
507 def _calculate_default_permissions(self):
507 def _calculate_default_permissions(self):
508 """
508 """
        Set default user permissions for repositories and repository groups,
        taken from the `default` user.
511
511
512 Calculate inheritance of object permissions based on what we have now
512 Calculate inheritance of object permissions based on what we have now
513 in GLOBAL permissions. We check if .false is in GLOBAL since this is
513 in GLOBAL permissions. We check if .false is in GLOBAL since this is
514 explicitly set. Inherit is the opposite of .false being there.
514 explicitly set. Inherit is the opposite of .false being there.
515
515
516 .. note::
516 .. note::
517
517
            the syntax is a little bit odd, but what we check here is the
            absence of the .false permission from the list; even in the
            inconsistent state where both .true and .false are present,
            .false takes precedence
522
522
523 """
523 """
524 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
524 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
525 in self.permissions_global)
525 in self.permissions_global)
526
526
527 # defaults for repositories, taken from `default` user permissions
527 # defaults for repositories, taken from `default` user permissions
528 # on given repo
528 # on given repo
529 for perm in self.default_repo_perms:
529 for perm in self.default_repo_perms:
530 r_k = perm.UserRepoToPerm.repository.repo_name
530 r_k = perm.UserRepoToPerm.repository.repo_name
531 o = PermOrigin.REPO_DEFAULT
531 o = PermOrigin.REPO_DEFAULT
532 if perm.Repository.private and not (
532 if perm.Repository.private and not (
533 perm.Repository.user_id == self.user_id):
533 perm.Repository.user_id == self.user_id):
534 # disable defaults for private repos,
534 # disable defaults for private repos,
535 p = 'repository.none'
535 p = 'repository.none'
536 o = PermOrigin.REPO_PRIVATE
536 o = PermOrigin.REPO_PRIVATE
537 elif perm.Repository.user_id == self.user_id:
537 elif perm.Repository.user_id == self.user_id:
538 # set admin if owner
538 # set admin if owner
539 p = 'repository.admin'
539 p = 'repository.admin'
540 o = PermOrigin.REPO_OWNER
540 o = PermOrigin.REPO_OWNER
541 else:
541 else:
542 p = perm.Permission.permission_name
542 p = perm.Permission.permission_name
                # if this user does not inherit permissions from the default
                # user, set the permission to .none so that only explicit
                # permissions apply
546 if not user_inherit_object_permissions:
546 if not user_inherit_object_permissions:
547 p = 'repository.none'
547 p = 'repository.none'
548 self.permissions_repositories[r_k] = p, o
548 self.permissions_repositories[r_k] = p, o
549
549
550 # defaults for repository groups taken from `default` user permission
550 # defaults for repository groups taken from `default` user permission
551 # on given group
551 # on given group
552 for perm in self.default_repo_groups_perms:
552 for perm in self.default_repo_groups_perms:
553 rg_k = perm.UserRepoGroupToPerm.group.group_name
553 rg_k = perm.UserRepoGroupToPerm.group.group_name
554 o = PermOrigin.REPOGROUP_DEFAULT
554 o = PermOrigin.REPOGROUP_DEFAULT
555 if perm.RepoGroup.user_id == self.user_id:
555 if perm.RepoGroup.user_id == self.user_id:
556 # set admin if owner
556 # set admin if owner
557 p = 'group.admin'
557 p = 'group.admin'
558 o = PermOrigin.REPOGROUP_OWNER
558 o = PermOrigin.REPOGROUP_OWNER
559 else:
559 else:
560 p = perm.Permission.permission_name
560 p = perm.Permission.permission_name
561
561
            # if this user does not inherit permissions from the default user,
            # set the permission to .none so that only explicit permissions apply
564 if not user_inherit_object_permissions:
564 if not user_inherit_object_permissions:
565 p = 'group.none'
565 p = 'group.none'
566 self.permissions_repository_groups[rg_k] = p, o
566 self.permissions_repository_groups[rg_k] = p, o
567
567
568 # defaults for user groups taken from `default` user permission
568 # defaults for user groups taken from `default` user permission
569 # on given user group
569 # on given user group
570 for perm in self.default_user_group_perms:
570 for perm in self.default_user_group_perms:
571 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
571 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
572 o = PermOrigin.USERGROUP_DEFAULT
572 o = PermOrigin.USERGROUP_DEFAULT
573 if perm.UserGroup.user_id == self.user_id:
573 if perm.UserGroup.user_id == self.user_id:
574 # set admin if owner
574 # set admin if owner
575 p = 'usergroup.admin'
575 p = 'usergroup.admin'
576 o = PermOrigin.USERGROUP_OWNER
576 o = PermOrigin.USERGROUP_OWNER
577 else:
577 else:
578 p = perm.Permission.permission_name
578 p = perm.Permission.permission_name
579
579
            # if this user does not inherit permissions from the default user,
            # set the permission to .none so that only explicit permissions apply
582 if not user_inherit_object_permissions:
582 if not user_inherit_object_permissions:
583 p = 'usergroup.none'
583 p = 'usergroup.none'
584 self.permissions_user_groups[u_k] = p, o
584 self.permissions_user_groups[u_k] = p, o
585
585
586 def _calculate_repository_permissions(self):
586 def _calculate_repository_permissions(self):
587 """
587 """
588 Repository permissions for the current user.
588 Repository permissions for the current user.
589
589
        Check if the user is part of user groups that have access to this
        repository and fill in the permissions from them. `_choose_permission`
        decides which permission is selected, based on the selected method.
593 """
593 """
594
594
595 # user group for repositories permissions
595 # user group for repositories permissions
596 user_repo_perms_from_user_group = Permission\
596 user_repo_perms_from_user_group = Permission\
597 .get_default_repo_perms_from_user_group(
597 .get_default_repo_perms_from_user_group(
598 self.user_id, self.scope_repo_id)
598 self.user_id, self.scope_repo_id)
599
599
600 multiple_counter = collections.defaultdict(int)
600 multiple_counter = collections.defaultdict(int)
601 for perm in user_repo_perms_from_user_group:
601 for perm in user_repo_perms_from_user_group:
602 r_k = perm.UserGroupRepoToPerm.repository.repo_name
602 r_k = perm.UserGroupRepoToPerm.repository.repo_name
603 ug_k = perm.UserGroupRepoToPerm.users_group.users_group_name
603 ug_k = perm.UserGroupRepoToPerm.users_group.users_group_name
604 multiple_counter[r_k] += 1
604 multiple_counter[r_k] += 1
605 p = perm.Permission.permission_name
605 p = perm.Permission.permission_name
606 o = PermOrigin.REPO_USERGROUP % ug_k
606 o = PermOrigin.REPO_USERGROUP % ug_k
607
607
608 if perm.Repository.user_id == self.user_id:
608 if perm.Repository.user_id == self.user_id:
609 # set admin if owner
609 # set admin if owner
610 p = 'repository.admin'
610 p = 'repository.admin'
611 o = PermOrigin.REPO_OWNER
611 o = PermOrigin.REPO_OWNER
612 else:
612 else:
613 if multiple_counter[r_k] > 1:
613 if multiple_counter[r_k] > 1:
614 cur_perm = self.permissions_repositories[r_k]
614 cur_perm = self.permissions_repositories[r_k]
615 p = self._choose_permission(p, cur_perm)
615 p = self._choose_permission(p, cur_perm)
616 self.permissions_repositories[r_k] = p, o
616 self.permissions_repositories[r_k] = p, o
617
617
618 # user explicit permissions for repositories, overrides any specified
618 # user explicit permissions for repositories, overrides any specified
619 # by the group permission
619 # by the group permission
620 user_repo_perms = Permission.get_default_repo_perms(
620 user_repo_perms = Permission.get_default_repo_perms(
621 self.user_id, self.scope_repo_id)
621 self.user_id, self.scope_repo_id)
622 for perm in user_repo_perms:
622 for perm in user_repo_perms:
623 r_k = perm.UserRepoToPerm.repository.repo_name
623 r_k = perm.UserRepoToPerm.repository.repo_name
624 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
624 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
625 # set admin if owner
625 # set admin if owner
626 if perm.Repository.user_id == self.user_id:
626 if perm.Repository.user_id == self.user_id:
627 p = 'repository.admin'
627 p = 'repository.admin'
628 o = PermOrigin.REPO_OWNER
628 o = PermOrigin.REPO_OWNER
629 else:
629 else:
630 p = perm.Permission.permission_name
630 p = perm.Permission.permission_name
631 if not self.explicit:
631 if not self.explicit:
632 cur_perm = self.permissions_repositories.get(
632 cur_perm = self.permissions_repositories.get(
633 r_k, 'repository.none')
633 r_k, 'repository.none')
634 p = self._choose_permission(p, cur_perm)
634 p = self._choose_permission(p, cur_perm)
635 self.permissions_repositories[r_k] = p, o
635 self.permissions_repositories[r_k] = p, o
636
636
637 def _calculate_repository_group_permissions(self):
637 def _calculate_repository_group_permissions(self):
638 """
638 """
639 Repository group permissions for the current user.
639 Repository group permissions for the current user.
640
640
        Check if the user is part of user groups that have access to
        repository groups and fill in the permissions from them.
        `_choose_permission` decides which permission is selected, based on
        the selected method.
644 """
644 """
645 # user group for repo groups permissions
645 # user group for repo groups permissions
646 user_repo_group_perms_from_user_group = Permission\
646 user_repo_group_perms_from_user_group = Permission\
647 .get_default_group_perms_from_user_group(
647 .get_default_group_perms_from_user_group(
648 self.user_id, self.scope_repo_group_id)
648 self.user_id, self.scope_repo_group_id)
649
649
650 multiple_counter = collections.defaultdict(int)
650 multiple_counter = collections.defaultdict(int)
651 for perm in user_repo_group_perms_from_user_group:
651 for perm in user_repo_group_perms_from_user_group:
652 g_k = perm.UserGroupRepoGroupToPerm.group.group_name
652 g_k = perm.UserGroupRepoGroupToPerm.group.group_name
653 ug_k = perm.UserGroupRepoGroupToPerm.users_group.users_group_name
653 ug_k = perm.UserGroupRepoGroupToPerm.users_group.users_group_name
654 o = PermOrigin.REPOGROUP_USERGROUP % ug_k
654 o = PermOrigin.REPOGROUP_USERGROUP % ug_k
655 multiple_counter[g_k] += 1
655 multiple_counter[g_k] += 1
656 p = perm.Permission.permission_name
656 p = perm.Permission.permission_name
657 if perm.RepoGroup.user_id == self.user_id:
657 if perm.RepoGroup.user_id == self.user_id:
658 # set admin if owner, even for member of other user group
658 # set admin if owner, even for member of other user group
659 p = 'group.admin'
659 p = 'group.admin'
660 o = PermOrigin.REPOGROUP_OWNER
660 o = PermOrigin.REPOGROUP_OWNER
661 else:
661 else:
662 if multiple_counter[g_k] > 1:
662 if multiple_counter[g_k] > 1:
663 cur_perm = self.permissions_repository_groups[g_k]
663 cur_perm = self.permissions_repository_groups[g_k]
664 p = self._choose_permission(p, cur_perm)
664 p = self._choose_permission(p, cur_perm)
665 self.permissions_repository_groups[g_k] = p, o
665 self.permissions_repository_groups[g_k] = p, o
666
666
667 # user explicit permissions for repository groups
667 # user explicit permissions for repository groups
668 user_repo_groups_perms = Permission.get_default_group_perms(
668 user_repo_groups_perms = Permission.get_default_group_perms(
669 self.user_id, self.scope_repo_group_id)
669 self.user_id, self.scope_repo_group_id)
670 for perm in user_repo_groups_perms:
670 for perm in user_repo_groups_perms:
671 rg_k = perm.UserRepoGroupToPerm.group.group_name
671 rg_k = perm.UserRepoGroupToPerm.group.group_name
672 u_k = perm.UserRepoGroupToPerm.user.username
672 u_k = perm.UserRepoGroupToPerm.user.username
673 o = PermOrigin.REPOGROUP_USER % u_k
673 o = PermOrigin.REPOGROUP_USER % u_k
674
674
675 if perm.RepoGroup.user_id == self.user_id:
675 if perm.RepoGroup.user_id == self.user_id:
676 # set admin if owner
676 # set admin if owner
677 p = 'group.admin'
677 p = 'group.admin'
678 o = PermOrigin.REPOGROUP_OWNER
678 o = PermOrigin.REPOGROUP_OWNER
679 else:
679 else:
680 p = perm.Permission.permission_name
680 p = perm.Permission.permission_name
681 if not self.explicit:
681 if not self.explicit:
682 cur_perm = self.permissions_repository_groups.get(
682 cur_perm = self.permissions_repository_groups.get(
683 rg_k, 'group.none')
683 rg_k, 'group.none')
684 p = self._choose_permission(p, cur_perm)
684 p = self._choose_permission(p, cur_perm)
685 self.permissions_repository_groups[rg_k] = p, o
685 self.permissions_repository_groups[rg_k] = p, o
686
686
687 def _calculate_user_group_permissions(self):
687 def _calculate_user_group_permissions(self):
688 """
688 """
689 User group permissions for the current user.
689 User group permissions for the current user.
690 """
690 """
691 # user group for user group permissions
691 # user group for user group permissions
692 user_group_from_user_group = Permission\
692 user_group_from_user_group = Permission\
693 .get_default_user_group_perms_from_user_group(
693 .get_default_user_group_perms_from_user_group(
694 self.user_id, self.scope_user_group_id)
694 self.user_id, self.scope_user_group_id)
695
695
696 multiple_counter = collections.defaultdict(int)
696 multiple_counter = collections.defaultdict(int)
697 for perm in user_group_from_user_group:
697 for perm in user_group_from_user_group:
698 g_k = perm.UserGroupUserGroupToPerm\
698 g_k = perm.UserGroupUserGroupToPerm\
699 .target_user_group.users_group_name
699 .target_user_group.users_group_name
700 u_k = perm.UserGroupUserGroupToPerm\
700 u_k = perm.UserGroupUserGroupToPerm\
701 .user_group.users_group_name
701 .user_group.users_group_name
702 o = PermOrigin.USERGROUP_USERGROUP % u_k
702 o = PermOrigin.USERGROUP_USERGROUP % u_k
703 multiple_counter[g_k] += 1
703 multiple_counter[g_k] += 1
704 p = perm.Permission.permission_name
704 p = perm.Permission.permission_name
705
705
706 if perm.UserGroup.user_id == self.user_id:
706 if perm.UserGroup.user_id == self.user_id:
707 # set admin if owner, even for member of other user group
707 # set admin if owner, even for member of other user group
708 p = 'usergroup.admin'
708 p = 'usergroup.admin'
709 o = PermOrigin.USERGROUP_OWNER
709 o = PermOrigin.USERGROUP_OWNER
710 else:
710 else:
711 if multiple_counter[g_k] > 1:
711 if multiple_counter[g_k] > 1:
712 cur_perm = self.permissions_user_groups[g_k]
712 cur_perm = self.permissions_user_groups[g_k]
713 p = self._choose_permission(p, cur_perm)
713 p = self._choose_permission(p, cur_perm)
714 self.permissions_user_groups[g_k] = p, o
714 self.permissions_user_groups[g_k] = p, o
715
715
716 # user explicit permission for user groups
716 # user explicit permission for user groups
717 user_user_groups_perms = Permission.get_default_user_group_perms(
717 user_user_groups_perms = Permission.get_default_user_group_perms(
718 self.user_id, self.scope_user_group_id)
718 self.user_id, self.scope_user_group_id)
719 for perm in user_user_groups_perms:
719 for perm in user_user_groups_perms:
720 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
720 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
721 u_k = perm.UserUserGroupToPerm.user.username
721 u_k = perm.UserUserGroupToPerm.user.username
722 o = PermOrigin.USERGROUP_USER % u_k
722 o = PermOrigin.USERGROUP_USER % u_k
723
723
724 if perm.UserGroup.user_id == self.user_id:
724 if perm.UserGroup.user_id == self.user_id:
725 # set admin if owner
725 # set admin if owner
726 p = 'usergroup.admin'
726 p = 'usergroup.admin'
727 o = PermOrigin.USERGROUP_OWNER
727 o = PermOrigin.USERGROUP_OWNER
728 else:
728 else:
729 p = perm.Permission.permission_name
729 p = perm.Permission.permission_name
730 if not self.explicit:
730 if not self.explicit:
731 cur_perm = self.permissions_user_groups.get(
731 cur_perm = self.permissions_user_groups.get(
732 ug_k, 'usergroup.none')
732 ug_k, 'usergroup.none')
733 p = self._choose_permission(p, cur_perm)
733 p = self._choose_permission(p, cur_perm)
734 self.permissions_user_groups[ug_k] = p, o
734 self.permissions_user_groups[ug_k] = p, o
735
735
736 def _choose_permission(self, new_perm, cur_perm):
736 def _choose_permission(self, new_perm, cur_perm):
737 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
737 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
738 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
738 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
739 if self.algo == 'higherwin':
739 if self.algo == 'higherwin':
740 if new_perm_val > cur_perm_val:
740 if new_perm_val > cur_perm_val:
741 return new_perm
741 return new_perm
742 return cur_perm
742 return cur_perm
743 elif self.algo == 'lowerwin':
743 elif self.algo == 'lowerwin':
744 if new_perm_val < cur_perm_val:
744 if new_perm_val < cur_perm_val:
745 return new_perm
745 return new_perm
746 return cur_perm
746 return cur_perm
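        # Illustrative behaviour, assuming PERM_WEIGHTS ranks
        # repository.none < repository.read < repository.write < repository.admin:
        #   algo='higherwin': _choose_permission('repository.write',
        #                                        'repository.read') -> 'repository.write'
        #   algo='lowerwin':  the same call returns 'repository.read'
        # Any other algo value falls through and implicitly returns None.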
747
747
748 def _permission_structure(self):
748 def _permission_structure(self):
749 return {
749 return {
750 'global': self.permissions_global,
750 'global': self.permissions_global,
751 'repositories': self.permissions_repositories,
751 'repositories': self.permissions_repositories,
752 'repositories_groups': self.permissions_repository_groups,
752 'repositories_groups': self.permissions_repository_groups,
753 'user_groups': self.permissions_user_groups,
753 'user_groups': self.permissions_user_groups,
754 }
754 }
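        # Illustrative shape of the returned structure (names made up):
        #   {
        #       'global': {'hg.create.repository', ...},
        #       'repositories': {'some/repo': 'repository.read', ...},
        #       'repositories_groups': {'some-group': 'group.write', ...},
        #       'user_groups': {'devs': 'usergroup.read', ...},
        #   }
        # the last three mappings are PermOriginDict instances that also track
        # where each permission came from.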
755
755
756
756
757 def allowed_auth_token_access(controller_name, whitelist=None, auth_token=None):
757 def allowed_auth_token_access(controller_name, whitelist=None, auth_token=None):
758 """
758 """
759 Check if given controller_name is in whitelist of auth token access
759 Check if given controller_name is in whitelist of auth token access
760 """
760 """
761 if not whitelist:
761 if not whitelist:
762 from rhodecode import CONFIG
762 from rhodecode import CONFIG
763 whitelist = aslist(
763 whitelist = aslist(
764 CONFIG.get('api_access_controllers_whitelist'), sep=',')
764 CONFIG.get('api_access_controllers_whitelist'), sep=',')
765 log.debug(
765 log.debug(
766 'Allowed controllers for AUTH TOKEN access: %s' % (whitelist,))
766 'Allowed controllers for AUTH TOKEN access: %s' % (whitelist,))
767
767
768 auth_token_access_valid = False
768 auth_token_access_valid = False
769 for entry in whitelist:
769 for entry in whitelist:
770 if fnmatch.fnmatch(controller_name, entry):
770 if fnmatch.fnmatch(controller_name, entry):
771 auth_token_access_valid = True
771 auth_token_access_valid = True
772 break
772 break
773
773
774 if auth_token_access_valid:
774 if auth_token_access_valid:
775 log.debug('controller:%s matches entry in whitelist'
775 log.debug('controller:%s matches entry in whitelist'
776 % (controller_name,))
776 % (controller_name,))
777 else:
777 else:
778 msg = ('controller: %s does *NOT* match any entry in whitelist'
778 msg = ('controller: %s does *NOT* match any entry in whitelist'
779 % (controller_name,))
779 % (controller_name,))
780 if auth_token:
780 if auth_token:
781 # if we use auth token key and don't have access it's a warning
781 # if we use auth token key and don't have access it's a warning
782 log.warning(msg)
782 log.warning(msg)
783 else:
783 else:
784 log.debug(msg)
784 log.debug(msg)
785
785
786 return auth_token_access_valid
786 return auth_token_access_valid
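    # Illustrative only, controller names are made up: with a whitelist such
    # as ['ChangesetController:changeset_raw', 'GistsController:*'],
    #   allowed_auth_token_access('GistsController:show',
    #                             whitelist=['GistsController:*'])
    # matches via fnmatch and returns True, while 'SummaryController:index'
    # matches no entry and returns False.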
787
787
788
788
789 class AuthUser(object):
789 class AuthUser(object):
790 """
790 """
    A simple object that handles all attributes of a user in RhodeCode.

    It looks the user up by API key, the given user, or the user present in
    the session, then fills in all required information for that user. It
    also checks whether anonymous access is enabled and, if so, returns the
    default user as the logged-in user.
796 """
796 """
797 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
797 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
798
798
799 def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
799 def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
800
800
801 self.user_id = user_id
801 self.user_id = user_id
802 self._api_key = api_key
802 self._api_key = api_key
803
803
804 self.api_key = None
804 self.api_key = None
805 self.feed_token = ''
805 self.feed_token = ''
806 self.username = username
806 self.username = username
807 self.ip_addr = ip_addr
807 self.ip_addr = ip_addr
808 self.name = ''
808 self.name = ''
809 self.lastname = ''
809 self.lastname = ''
810 self.first_name = ''
810 self.first_name = ''
811 self.last_name = ''
811 self.last_name = ''
812 self.email = ''
812 self.email = ''
813 self.is_authenticated = False
813 self.is_authenticated = False
814 self.admin = False
814 self.admin = False
815 self.inherit_default_permissions = False
815 self.inherit_default_permissions = False
816 self.password = ''
816 self.password = ''
817
817
818 self.anonymous_user = None # propagated on propagate_data
818 self.anonymous_user = None # propagated on propagate_data
819 self.propagate_data()
819 self.propagate_data()
820 self._instance = None
820 self._instance = None
821 self._permissions_scoped_cache = {} # used to bind scoped calculation
821 self._permissions_scoped_cache = {} # used to bind scoped calculation
822
822
823 @LazyProperty
823 @LazyProperty
824 def permissions(self):
824 def permissions(self):
825 return self.get_perms(user=self, cache=False)
825 return self.get_perms(user=self, cache=False)
826
826
827 def permissions_with_scope(self, scope):
827 def permissions_with_scope(self, scope):
828 """
828 """
        Call the get_perms function with scoped data. The scope in that
        function narrows the SQL calls to the given object IDs, so that only
        the particular permissions we want are fetched. If scope is an empty
        dict, it effectively narrows the scope to GLOBAL permissions only.
833
833
834 :param scope: dict
834 :param scope: dict
835 """
835 """
836 if 'repo_name' in scope:
836 if 'repo_name' in scope:
837 obj = Repository.get_by_repo_name(scope['repo_name'])
837 obj = Repository.get_by_repo_name(scope['repo_name'])
838 if obj:
838 if obj:
839 scope['repo_id'] = obj.repo_id
839 scope['repo_id'] = obj.repo_id
840 _scope = {
840 _scope = {
841 'repo_id': -1,
841 'repo_id': -1,
842 'user_group_id': -1,
842 'user_group_id': -1,
843 'repo_group_id': -1,
843 'repo_group_id': -1,
844 }
844 }
845 _scope.update(scope)
845 _scope.update(scope)
846 cache_key = "_".join(map(safe_str, reduce(lambda a, b: a+b,
846 cache_key = "_".join(map(safe_str, reduce(lambda a, b: a+b,
847 _scope.items())))
847 _scope.items())))
848 if cache_key not in self._permissions_scoped_cache:
848 if cache_key not in self._permissions_scoped_cache:
849 # store in cache to mimic how the @LazyProperty works,
849 # store in cache to mimic how the @LazyProperty works,
850 # the difference here is that we use the unique key calculated
850 # the difference here is that we use the unique key calculated
851 # from params and values
851 # from params and values
852 res = self.get_perms(user=self, cache=False, scope=_scope)
852 res = self.get_perms(user=self, cache=False, scope=_scope)
853 self._permissions_scoped_cache[cache_key] = res
853 self._permissions_scoped_cache[cache_key] = res
854 return self._permissions_scoped_cache[cache_key]
854 return self._permissions_scoped_cache[cache_key]
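        # Illustrative usage ('some/repo' is a made-up name):
        #   auth_user.permissions_with_scope({'repo_name': 'some/repo'})
        # resolves repo_name to repo_id and computes a tree limited to that
        # repository; an empty dict keeps the -1 placeholders above and thus
        # effectively returns only the GLOBAL part of the permission tree.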
855
855
856 def get_instance(self):
856 def get_instance(self):
857 return User.get(self.user_id)
857 return User.get(self.user_id)
858
858
859 def update_lastactivity(self):
859 def update_lastactivity(self):
860 if self.user_id:
860 if self.user_id:
861 User.get(self.user_id).update_lastactivity()
861 User.get(self.user_id).update_lastactivity()
862
862
863 def propagate_data(self):
863 def propagate_data(self):
864 """
864 """
865 Fills in user data and propagates values to this instance. Maps fetched
865 Fills in user data and propagates values to this instance. Maps fetched
866 user attributes to this class instance attributes
866 user attributes to this class instance attributes
867 """
867 """
868 log.debug('starting data propagation for new potential AuthUser')
868 log.debug('starting data propagation for new potential AuthUser')
869 user_model = UserModel()
869 user_model = UserModel()
870 anon_user = self.anonymous_user = User.get_default_user(cache=True)
870 anon_user = self.anonymous_user = User.get_default_user(cache=True)
871 is_user_loaded = False
871 is_user_loaded = False
872
872
873 # lookup by userid
873 # lookup by userid
874 if self.user_id is not None and self.user_id != anon_user.user_id:
874 if self.user_id is not None and self.user_id != anon_user.user_id:
875 log.debug('Trying Auth User lookup by USER ID: `%s`' % self.user_id)
875 log.debug('Trying Auth User lookup by USER ID: `%s`' % self.user_id)
876 is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
876 is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
877
877
        # try to get user by api key
879 elif self._api_key and self._api_key != anon_user.api_key:
879 elif self._api_key and self._api_key != anon_user.api_key:
880 log.debug('Trying Auth User lookup by API KEY: `%s`' % self._api_key)
880 log.debug('Trying Auth User lookup by API KEY: `%s`' % self._api_key)
881 is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
881 is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
882
882
883 # lookup by username
883 # lookup by username
884 elif self.username:
884 elif self.username:
885 log.debug('Trying Auth User lookup by USER NAME: `%s`' % self.username)
885 log.debug('Trying Auth User lookup by USER NAME: `%s`' % self.username)
886 is_user_loaded = user_model.fill_data(self, username=self.username)
886 is_user_loaded = user_model.fill_data(self, username=self.username)
887 else:
887 else:
            log.debug('No data in %s that could have been used to log in' % self)
889
889
890 if not is_user_loaded:
890 if not is_user_loaded:
891 log.debug('Failed to load user. Fallback to default user')
891 log.debug('Failed to load user. Fallback to default user')
892 # if we cannot authenticate user try anonymous
892 # if we cannot authenticate user try anonymous
893 if anon_user.active:
893 if anon_user.active:
894 user_model.fill_data(self, user_id=anon_user.user_id)
894 user_model.fill_data(self, user_id=anon_user.user_id)
895 # then we set this user is logged in
895 # then we set this user is logged in
896 self.is_authenticated = True
896 self.is_authenticated = True
897 else:
897 else:
898 # in case of disabled anonymous user we reset some of the
898 # in case of disabled anonymous user we reset some of the
899 # parameters so such user is "corrupted", skipping the fill_data
899 # parameters so such user is "corrupted", skipping the fill_data
900 for attr in ['user_id', 'username', 'admin', 'active']:
900 for attr in ['user_id', 'username', 'admin', 'active']:
901 setattr(self, attr, None)
901 setattr(self, attr, None)
902 self.is_authenticated = False
902 self.is_authenticated = False
903
903
904 if not self.username:
904 if not self.username:
905 self.username = 'None'
905 self.username = 'None'
906
906
907 log.debug('Auth User is now %s' % self)
907 log.debug('Auth User is now %s' % self)
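        # Illustrative lookup precedence implemented above (values made up):
        #   AuthUser(user_id=3)          -> loaded by user id
        #   AuthUser(api_key='d4f1...')  -> loaded by auth token
        #   AuthUser(username='bob')     -> loaded by username
        # With no usable data the active default (anonymous) user is used;
        # otherwise the instance is left unauthenticated and "corrupted".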
908
908
909 def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
909 def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
910 cache=False):
910 cache=False):
911 """
911 """
        Fills the user permission attribute with permissions taken from the
        database. Works for permissions given for repositories, and for
        permissions that are granted to groups.

        :param user: instance of User object from database
        :param explicit: In case there are permissions both for the user and
            a group the user is part of, the explicit flag defines whether the
            user permission explicitly overrides the group permission; if it
            is False, the decision is made based on the algo
        :param algo: algorithm used to decide which permission to choose when
            multiple permissions are defined, e.g. the user is in two
            different groups. It also decides, when the explicit flag is
            turned off, how to pick the permission when the user is in a
            group and also has a separate permission defined
925 """
925 """
926 user_id = user.user_id
926 user_id = user.user_id
927 user_is_admin = user.is_admin
927 user_is_admin = user.is_admin
928
928
929 # inheritance of global permissions like create repo/fork repo etc
929 # inheritance of global permissions like create repo/fork repo etc
930 user_inherit_default_permissions = user.inherit_default_permissions
930 user_inherit_default_permissions = user.inherit_default_permissions
931
931
932 log.debug('Computing PERMISSION tree for scope %s' % (scope, ))
932 log.debug('Computing PERMISSION tree for scope %s' % (scope, ))
933 compute = caches.conditional_cache(
933 compute = caches.conditional_cache(
934 'short_term', 'cache_desc',
934 'short_term', 'cache_desc',
935 condition=cache, func=_cached_perms_data)
935 condition=cache, func=_cached_perms_data)
936 result = compute(user_id, scope, user_is_admin,
936 result = compute(user_id, scope, user_is_admin,
937 user_inherit_default_permissions, explicit, algo)
937 user_inherit_default_permissions, explicit, algo)
938
938
939 result_repr = []
939 result_repr = []
940 for k in result:
940 for k in result:
941 result_repr.append((k, len(result[k])))
941 result_repr.append((k, len(result[k])))
942
942
943 log.debug('PERMISSION tree computed %s' % (result_repr,))
943 log.debug('PERMISSION tree computed %s' % (result_repr,))
944 return result
944 return result
945
945
946 @property
946 @property
947 def is_default(self):
947 def is_default(self):
948 return self.username == User.DEFAULT_USER
948 return self.username == User.DEFAULT_USER
949
949
950 @property
950 @property
951 def is_admin(self):
951 def is_admin(self):
952 return self.admin
952 return self.admin
953
953
954 @property
954 @property
955 def is_user_object(self):
955 def is_user_object(self):
956 return self.user_id is not None
956 return self.user_id is not None
957
957
958 @property
958 @property
959 def repositories_admin(self):
959 def repositories_admin(self):
960 """
960 """
961 Returns list of repositories you're an admin of
961 Returns list of repositories you're an admin of
962 """
962 """
963 return [
963 return [
964 x[0] for x in self.permissions['repositories'].iteritems()
964 x[0] for x in self.permissions['repositories'].iteritems()
965 if x[1] == 'repository.admin']
965 if x[1] == 'repository.admin']
966
966
967 @property
967 @property
968 def repository_groups_admin(self):
968 def repository_groups_admin(self):
969 """
969 """
970 Returns list of repository groups you're an admin of
970 Returns list of repository groups you're an admin of
971 """
971 """
972 return [
972 return [
973 x[0] for x in self.permissions['repositories_groups'].iteritems()
973 x[0] for x in self.permissions['repositories_groups'].iteritems()
974 if x[1] == 'group.admin']
974 if x[1] == 'group.admin']
975
975
976 @property
976 @property
977 def user_groups_admin(self):
977 def user_groups_admin(self):
978 """
978 """
979 Returns list of user groups you're an admin of
979 Returns list of user groups you're an admin of
980 """
980 """
981 return [
981 return [
982 x[0] for x in self.permissions['user_groups'].iteritems()
982 x[0] for x in self.permissions['user_groups'].iteritems()
983 if x[1] == 'usergroup.admin']
983 if x[1] == 'usergroup.admin']
984
984
985 @property
985 @property
986 def ip_allowed(self):
986 def ip_allowed(self):
987 """
987 """
        Checks whether the ip_addr passed to the constructor is within the
        defined list of allowed IP addresses for the user
990
990
991 :returns: boolean, True if ip is in allowed ip range
991 :returns: boolean, True if ip is in allowed ip range
992 """
992 """
993 # check IP
993 # check IP
994 inherit = self.inherit_default_permissions
994 inherit = self.inherit_default_permissions
995 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
995 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
996 inherit_from_default=inherit)
996 inherit_from_default=inherit)

    @property
998 def personal_repo_group(self):
998 def personal_repo_group(self):
999 return RepoGroup.get_user_personal_repo_group(self.user_id)
999 return RepoGroup.get_user_personal_repo_group(self.user_id)
1000
1000
1001 @classmethod
1001 @classmethod
1002 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1002 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1003 allowed_ips = AuthUser.get_allowed_ips(
1003 allowed_ips = AuthUser.get_allowed_ips(
1004 user_id, cache=True, inherit_from_default=inherit_from_default)
1004 user_id, cache=True, inherit_from_default=inherit_from_default)
1005 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1005 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1006 log.debug('IP:%s is in range of %s' % (ip_addr, allowed_ips))
1006 log.debug('IP:%s is in range of %s' % (ip_addr, allowed_ips))
1007 return True
1007 return True
1008 else:
1008 else:
1009 log.info('Access for IP:%s forbidden, '
1009 log.info('Access for IP:%s forbidden, '
1010 'not in %s' % (ip_addr, allowed_ips))
1010 'not in %s' % (ip_addr, allowed_ips))
1011 return False
1011 return False
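        # Illustrative, addresses are examples: with allowed_ips of
        # {'10.0.0.0/8'} a source_ip of '10.1.2.3' passes while '192.168.1.1'
        # does not; the fallback of get_allowed_ips() below,
        # {'0.0.0.0/0', '::/0'}, allows any IPv4/IPv6 address.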
1012
1012
1013 def __repr__(self):
1013 def __repr__(self):
1014 return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\
1014 return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\
1015 % (self.user_id, self.username, self.ip_addr, self.is_authenticated)
1015 % (self.user_id, self.username, self.ip_addr, self.is_authenticated)
1016
1016
1017 def set_authenticated(self, authenticated=True):
1017 def set_authenticated(self, authenticated=True):
1018 if self.user_id != self.anonymous_user.user_id:
1018 if self.user_id != self.anonymous_user.user_id:
1019 self.is_authenticated = authenticated
1019 self.is_authenticated = authenticated
1020
1020
1021 def get_cookie_store(self):
1021 def get_cookie_store(self):
1022 return {
1022 return {
1023 'username': self.username,
1023 'username': self.username,
1024 'password': md5(self.password),
1024 'password': md5(self.password),
1025 'user_id': self.user_id,
1025 'user_id': self.user_id,
1026 'is_authenticated': self.is_authenticated
1026 'is_authenticated': self.is_authenticated
1027 }
1027 }
1028
1028
1029 @classmethod
1029 @classmethod
1030 def from_cookie_store(cls, cookie_store):
1030 def from_cookie_store(cls, cookie_store):
1031 """
1031 """
1032 Creates AuthUser from a cookie store
1032 Creates AuthUser from a cookie store
1033
1033
1034 :param cls:
1034 :param cls:
1035 :param cookie_store:
1035 :param cookie_store:
1036 """
1036 """
1037 user_id = cookie_store.get('user_id')
1037 user_id = cookie_store.get('user_id')
1038 username = cookie_store.get('username')
1038 username = cookie_store.get('username')
1039 api_key = cookie_store.get('api_key')
1039 api_key = cookie_store.get('api_key')
1040 return AuthUser(user_id, api_key, username)
1040 return AuthUser(user_id, api_key, username)
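        # NOTE: get_cookie_store() above does not emit an 'api_key' entry, so
        # api_key will normally be None here unless the cookie payload was
        # produced by some other code path.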
1041
1041
1042 @classmethod
1042 @classmethod
1043 def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
1043 def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
1044 _set = set()
1044 _set = set()
1045
1045
1046 if inherit_from_default:
1046 if inherit_from_default:
1047 default_ips = UserIpMap.query().filter(
1047 default_ips = UserIpMap.query().filter(
1048 UserIpMap.user == User.get_default_user(cache=True))
1048 UserIpMap.user == User.get_default_user(cache=True))
1049 if cache:
1049 if cache:
1050 default_ips = default_ips.options(
1050 default_ips = default_ips.options(
1051 FromCache("sql_cache_short", "get_user_ips_default"))
1051 FromCache("sql_cache_short", "get_user_ips_default"))
1052
1052
1053 # populate from default user
1053 # populate from default user
1054 for ip in default_ips:
1054 for ip in default_ips:
1055 try:
1055 try:
1056 _set.add(ip.ip_addr)
1056 _set.add(ip.ip_addr)
1057 except ObjectDeletedError:
1057 except ObjectDeletedError:
1058 # since we use heavy caching sometimes it happens that
1058 # since we use heavy caching sometimes it happens that
1059 # we get deleted objects here, we just skip them
1059 # we get deleted objects here, we just skip them
1060 pass
1060 pass
1061
1061
1062 user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
1062 user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
1063 if cache:
1063 if cache:
1064 user_ips = user_ips.options(
1064 user_ips = user_ips.options(
1065 FromCache("sql_cache_short", "get_user_ips_%s" % user_id))
1065 FromCache("sql_cache_short", "get_user_ips_%s" % user_id))
1066
1066
1067 for ip in user_ips:
1067 for ip in user_ips:
1068 try:
1068 try:
1069 _set.add(ip.ip_addr)
1069 _set.add(ip.ip_addr)
1070 except ObjectDeletedError:
1070 except ObjectDeletedError:
1071 # since we use heavy caching sometimes it happens that we get
1071 # since we use heavy caching sometimes it happens that we get
1072 # deleted objects here, we just skip them
1072 # deleted objects here, we just skip them
1073 pass
1073 pass
1074 return _set or set(['0.0.0.0/0', '::/0'])
1074 return _set or set(['0.0.0.0/0', '::/0'])
1075
1075
1076
1076
1077 def set_available_permissions(config):
1077 def set_available_permissions(config):
1078 """
1078 """
    This function will populate the pylons globals with all available
    permissions defined in the db. We don't want to check the db each time
    for new permissions, since adding a new permission also requires an
    application restart, i.e. to decorate new views with the newly created
    permission.
1083
1083
1084 :param config: current pylons config instance
1084 :param config: current pylons config instance
1085
1085
1086 """
1086 """
1087 log.info('getting information about all available permissions')
1087 log.info('getting information about all available permissions')
1088 try:
1088 try:
1089 sa = meta.Session
1089 sa = meta.Session
1090 all_perms = sa.query(Permission).all()
1090 all_perms = sa.query(Permission).all()
1091 config['available_permissions'] = [x.permission_name for x in all_perms]
1091 config['available_permissions'] = [x.permission_name for x in all_perms]
1092 except Exception:
1092 except Exception:
1093 log.error(traceback.format_exc())
1093 log.error(traceback.format_exc())
1094 finally:
1094 finally:
1095 meta.Session.remove()
1095 meta.Session.remove()
1096
1096
1097
1097
1098 def get_csrf_token(session=None, force_new=False, save_if_missing=True):
1098 def get_csrf_token(session=None, force_new=False, save_if_missing=True):
1099 """
1099 """
    Return the current CSRF protection token, creating one if it doesn't
    already exist and the save_if_missing flag is set.
1102
1102
1103 :param session: pass in the pylons session, else we use the global ones
1103 :param session: pass in the pylons session, else we use the global ones
1104 :param force_new: force to re-generate the token and store it in session
1104 :param force_new: force to re-generate the token and store it in session
1105 :param save_if_missing: save the newly generated token if it's missing in
1105 :param save_if_missing: save the newly generated token if it's missing in
1106 session
1106 session
1107 """
1107 """
1108 # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
1109 # from pyramid.csrf import get_csrf_token
1110
1108 if not session:
1111 if not session:
1109 from pylons import session
1112 from pylons import session
1110
1113
1111 if (csrf_token_key not in session and save_if_missing) or force_new:
1114 if (csrf_token_key not in session and save_if_missing) or force_new:
1112 token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
1115 token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
1113 session[csrf_token_key] = token
1116 session[csrf_token_key] = token
1114 if hasattr(session, 'save'):
1117 if hasattr(session, 'save'):
1115 session.save()
1118 session.save()
1116 return session.get(csrf_token_key)
1119 return session.get(csrf_token_key)
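    # Illustrative usage, the form snippet is an assumption: a controller can
    # call
    #   token = get_csrf_token(session=request.session)
    # and render it as a hidden form field whose name equals csrf_token_key,
    # so that CSRFRequired below can find the supplied token in request.POST.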
1117
1120
1118
1121
1119 def get_request(perm_class):
1122 def get_request(perm_class):
1120 from pyramid.threadlocal import get_current_request
1123 from pyramid.threadlocal import get_current_request
1121 pyramid_request = get_current_request()
1124 pyramid_request = get_current_request()
1122 if not pyramid_request:
1125 if not pyramid_request:
1123 # return global request of pylons in case pyramid isn't available
1126 # return global request of pylons in case pyramid isn't available
1124 # NOTE(marcink): this should be removed after migration to pyramid
1127 # NOTE(marcink): this should be removed after migration to pyramid
1125 from pylons import request
1128 from pylons import request
1126 return request
1129 return request
1127 return pyramid_request
1130 return pyramid_request
1128
1131
1129
1132
1130 # CHECK DECORATORS
1133 # CHECK DECORATORS
1131 class CSRFRequired(object):
1134 class CSRFRequired(object):
1132 """
1135 """
1133 Decorator for authenticating a form
1136 Decorator for authenticating a form
1134
1137
1135 This decorator uses an authorization token stored in the client's
1138 This decorator uses an authorization token stored in the client's
1136 session for prevention of certain Cross-site request forgery (CSRF)
1139 session for prevention of certain Cross-site request forgery (CSRF)
1137 attacks (See
1140 attacks (See
1138 http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
1141 http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
1139 information).
1142 information).
1140
1143
1141 For use with the ``webhelpers.secure_form`` helper functions.
1144 For use with the ``webhelpers.secure_form`` helper functions.
1142
1145
1143 """
1146 """
1144 def __init__(self, token=csrf_token_key, header='X-CSRF-Token',
1147 def __init__(self, token=csrf_token_key, header='X-CSRF-Token',
1145 except_methods=None):
1148 except_methods=None):
1146 self.token = token
1149 self.token = token
1147 self.header = header
1150 self.header = header
1148 self.except_methods = except_methods or []
1151 self.except_methods = except_methods or []
1149
1152
1150 def __call__(self, func):
1153 def __call__(self, func):
1151 return get_cython_compat_decorator(self.__wrapper, func)
1154 return get_cython_compat_decorator(self.__wrapper, func)
1152
1155
1153 def _get_csrf(self, _request):
1156 def _get_csrf(self, _request):
1154 return _request.POST.get(self.token, _request.headers.get(self.header))
1157 return _request.POST.get(self.token, _request.headers.get(self.header))
1155
1158
1156 def check_csrf(self, _request, cur_token):
1159 def check_csrf(self, _request, cur_token):
1157 supplied_token = self._get_csrf(_request)
1160 supplied_token = self._get_csrf(_request)
1158 return supplied_token and supplied_token == cur_token
1161 return supplied_token and supplied_token == cur_token
1159
1162
1160 def _get_request(self):
1163 def _get_request(self):
1161 return get_request(self)
1164 return get_request(self)
1162
1165
1163 def __wrapper(self, func, *fargs, **fkwargs):
1166 def __wrapper(self, func, *fargs, **fkwargs):
1164 request = self._get_request()
1167 request = self._get_request()
1165
1168
1166 if request.method in self.except_methods:
1169 if request.method in self.except_methods:
1167 return func(*fargs, **fkwargs)
1170 return func(*fargs, **fkwargs)
1168
1171
1169 cur_token = get_csrf_token(save_if_missing=False)
1172 cur_token = get_csrf_token(save_if_missing=False)
1170 if self.check_csrf(request, cur_token):
1173 if self.check_csrf(request, cur_token):
1171 if request.POST.get(self.token):
1174 if request.POST.get(self.token):
1172 del request.POST[self.token]
1175 del request.POST[self.token]
1173 return func(*fargs, **fkwargs)
1176 return func(*fargs, **fkwargs)
1174 else:
1177 else:
1175 reason = 'token-missing'
1178 reason = 'token-missing'
1176 supplied_token = self._get_csrf(request)
1179 supplied_token = self._get_csrf(request)
1177 if supplied_token and cur_token != supplied_token:
1180 if supplied_token and cur_token != supplied_token:
1178 reason = 'token-mismatch [%s:%s]' % (
1181 reason = 'token-mismatch [%s:%s]' % (
                    (cur_token or '')[:6], (supplied_token or '')[:6])
1180
1183
1181 csrf_message = \
1184 csrf_message = \
1182 ("Cross-site request forgery detected, request denied. See "
1185 ("Cross-site request forgery detected, request denied. See "
1183 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
1186 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
1184 "more information.")
1187 "more information.")
1185 log.warn('Cross-site request forgery detected, request %r DENIED: %s '
1188 log.warn('Cross-site request forgery detected, request %r DENIED: %s '
1186 'REMOTE_ADDR:%s, HEADERS:%s' % (
1189 'REMOTE_ADDR:%s, HEADERS:%s' % (
1187 request, reason, request.remote_addr, request.headers))
1190 request, reason, request.remote_addr, request.headers))
1188
1191
1189 raise HTTPForbidden(explanation=csrf_message)
1192 raise HTTPForbidden(explanation=csrf_message)
1190
1193
1191
1194
1192 class LoginRequired(object):
1195 class LoginRequired(object):
1193 """
1196 """
    Must be logged in to execute this function, else redirect to the login
    page.

    :param auth_token_access: if enabled, this checks only for a valid auth
        token and grants access based on that token
1199 """
1202 """
1200 def __init__(self, auth_token_access=None):
1203 def __init__(self, auth_token_access=None):
1201 self.auth_token_access = auth_token_access
1204 self.auth_token_access = auth_token_access
1202
1205
1203 def __call__(self, func):
1206 def __call__(self, func):
1204 return get_cython_compat_decorator(self.__wrapper, func)
1207 return get_cython_compat_decorator(self.__wrapper, func)
1205
1208
1206 def _get_request(self):
1209 def _get_request(self):
1207 return get_request(self)
1210 return get_request(self)
1208
1211
1209 def __wrapper(self, func, *fargs, **fkwargs):
1212 def __wrapper(self, func, *fargs, **fkwargs):
1210 from rhodecode.lib import helpers as h
1213 from rhodecode.lib import helpers as h
1211 cls = fargs[0]
1214 cls = fargs[0]
1212 user = cls._rhodecode_user
1215 user = cls._rhodecode_user
1213 request = self._get_request()
1216 request = self._get_request()
1214
1217
1215 loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
1218 loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
1216 log.debug('Starting login restriction checks for user: %s' % (user,))
1219 log.debug('Starting login restriction checks for user: %s' % (user,))
1217 # check if our IP is allowed
1220 # check if our IP is allowed
1218 ip_access_valid = True
1221 ip_access_valid = True
1219 if not user.ip_allowed:
1222 if not user.ip_allowed:
1220 h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))),
1223 h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))),
1221 category='warning')
1224 category='warning')
1222 ip_access_valid = False
1225 ip_access_valid = False
1223
1226
1224 # check if we used an APIKEY and it's a valid one
1227 # check if we used an APIKEY and it's a valid one
1225 # defined white-list of controllers which API access will be enabled
1228 # defined white-list of controllers which API access will be enabled
1226 _auth_token = request.GET.get(
1229 _auth_token = request.GET.get(
1227 'auth_token', '') or request.GET.get('api_key', '')
1230 'auth_token', '') or request.GET.get('api_key', '')
1228 auth_token_access_valid = allowed_auth_token_access(
1231 auth_token_access_valid = allowed_auth_token_access(
1229 loc, auth_token=_auth_token)
1232 loc, auth_token=_auth_token)
1230
1233
1231 # explicit controller is enabled or API is in our whitelist
1234 # explicit controller is enabled or API is in our whitelist
1232 if self.auth_token_access or auth_token_access_valid:
1235 if self.auth_token_access or auth_token_access_valid:
1233 log.debug('Checking AUTH TOKEN access for %s' % (cls,))
1236 log.debug('Checking AUTH TOKEN access for %s' % (cls,))
1234 db_user = user.get_instance()
1237 db_user = user.get_instance()
1235
1238
1236 if db_user:
1239 if db_user:
1237 if self.auth_token_access:
1240 if self.auth_token_access:
1238 roles = self.auth_token_access
1241 roles = self.auth_token_access
1239 else:
1242 else:
1240 roles = [UserApiKeys.ROLE_HTTP]
1243 roles = [UserApiKeys.ROLE_HTTP]
1241 token_match = db_user.authenticate_by_token(
1244 token_match = db_user.authenticate_by_token(
1242 _auth_token, roles=roles)
1245 _auth_token, roles=roles)
1243 else:
1246 else:
1244 log.debug('Unable to fetch db instance for auth user: %s', user)
1247 log.debug('Unable to fetch db instance for auth user: %s', user)
1245 token_match = False
1248 token_match = False
1246
1249
1247 if _auth_token and token_match:
1250 if _auth_token and token_match:
1248 auth_token_access_valid = True
1251 auth_token_access_valid = True
1249 log.debug('AUTH TOKEN ****%s is VALID' % (_auth_token[-4:],))
1252 log.debug('AUTH TOKEN ****%s is VALID' % (_auth_token[-4:],))
1250 else:
1253 else:
1251 auth_token_access_valid = False
1254 auth_token_access_valid = False
1252 if not _auth_token:
1255 if not _auth_token:
1253 log.debug("AUTH TOKEN *NOT* present in request")
1256 log.debug("AUTH TOKEN *NOT* present in request")
1254 else:
1257 else:
1255 log.warning(
1258 log.warning(
1256 "AUTH TOKEN ****%s *NOT* valid" % _auth_token[-4:])
1259 "AUTH TOKEN ****%s *NOT* valid" % _auth_token[-4:])
1257
1260
1258 log.debug('Checking if %s is authenticated @ %s' % (user.username, loc))
1261 log.debug('Checking if %s is authenticated @ %s' % (user.username, loc))
1259 reason = 'RHODECODE_AUTH' if user.is_authenticated \
1262 reason = 'RHODECODE_AUTH' if user.is_authenticated \
1260 else 'AUTH_TOKEN_AUTH'
1263 else 'AUTH_TOKEN_AUTH'
1261
1264
1262 if ip_access_valid and (
1265 if ip_access_valid and (
1263 user.is_authenticated or auth_token_access_valid):
1266 user.is_authenticated or auth_token_access_valid):
1264 log.info(
1267 log.info(
1265 'user %s authenticating with:%s IS authenticated on func %s'
1268 'user %s authenticating with:%s IS authenticated on func %s'
1266 % (user, reason, loc))
1269 % (user, reason, loc))
1267
1270
1268 # update user data to check last activity
1271 # update user data to check last activity
1269 user.update_lastactivity()
1272 user.update_lastactivity()
1270 Session().commit()
1273 Session().commit()
1271 return func(*fargs, **fkwargs)
1274 return func(*fargs, **fkwargs)
1272 else:
1275 else:
1273 log.warning(
1276 log.warning(
1274 'user %s authenticating with:%s NOT authenticated on '
1277 'user %s authenticating with:%s NOT authenticated on '
1275 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s'
1278 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s'
1276 % (user, reason, loc, ip_access_valid,
1279 % (user, reason, loc, ip_access_valid,
1277 auth_token_access_valid))
1280 auth_token_access_valid))
1278 # we preserve the get PARAM
1281 # we preserve the get PARAM
1279 came_from = request.path_qs
1282 came_from = request.path_qs
1280 log.debug('redirecting to login page with %s' % (came_from,))
1283 log.debug('redirecting to login page with %s' % (came_from,))
1281 raise HTTPFound(
1284 raise HTTPFound(
1282 h.route_path('login', _query={'came_from': came_from}))
1285 h.route_path('login', _query={'came_from': came_from}))
1283
1286
1284
1287
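# Illustrative usage sketch (not part of the original module): the decorators
# in this file wrap controller methods, which receive the controller instance
# as the first positional argument. The controller base class, view name and
# token role below are assumptions made for the example only.
#
#   class MyReposController(BaseRepoController):
#
#       @LoginRequired(auth_token_access=[UserApiKeys.ROLE_HTTP])
#       def index(self):
#           # reached only for authenticated users, or for requests carrying
#           # a valid ?auth_token=... with the HTTP role
#           return render('my_template.mako')
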
class NotAnonymous(object):
    """
    Must be logged in to execute this function, otherwise the request is
    redirected to the login page.
    """

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        import rhodecode.lib.helpers as h
        cls = fargs[0]
        self.user = cls._rhodecode_user
        request = self._get_request()

        log.debug('Checking if user is not anonymous @%s' % cls)

        anonymous = self.user.username == User.DEFAULT_USER

        if anonymous:
            came_from = request.path_qs
            h.flash(_('You need to be a registered user to '
                      'perform this action'),
                    category='warning')
            raise HTTPFound(
                h.route_path('login', _query={'came_from': came_from}))
        else:
            return func(*fargs, **fkwargs)


class XHRRequired(object):
    # TODO(marcink): remove this in favor of the predicates in pyramid routes

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        from pylons.controllers.util import abort
        request = self._get_request()

        log.debug('Checking if request is XMLHttpRequest (XHR)')
        xhr_message = 'This is not a valid XMLHttpRequest (XHR) request'

        if not request.is_xhr:
            abort(400, detail=xhr_message)

        return func(*fargs, **fkwargs)


class HasAcceptedRepoType(object):
    """
    Check if requested repo is within given repo type aliases
    """

    # TODO(marcink): remove this in favor of the predicates in pyramid routes

    def __init__(self, *repo_type_list):
        self.repo_type_list = set(repo_type_list)

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def __wrapper(self, func, *fargs, **fkwargs):
        import rhodecode.lib.helpers as h
        cls = fargs[0]
        rhodecode_repo = cls.rhodecode_repo

        log.debug('%s checking repo type for %s in %s',
                  self.__class__.__name__,
                  rhodecode_repo.alias, self.repo_type_list)

        if rhodecode_repo.alias in self.repo_type_list:
            return func(*fargs, **fkwargs)
        else:
            h.flash(h.literal(
                _('Action not supported for %s.' % rhodecode_repo.alias)),
                category='warning')
            raise HTTPFound(
                h.route_path('repo_summary',
                             repo_name=cls.rhodecode_db_repo.repo_name))


class PermsDecorator(object):
    """
    Base class for controller permission decorators. The current user is
    extracted from the controller class itself, which stores it in the base
    controllers.
    """

    def __init__(self, *required_perms):
        self.required_perms = set(required_perms)

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def _get_came_from(self):
        _request = self._get_request()

        # both pylons and pyramid requests have this attribute
        return _request.path_qs

    def __wrapper(self, func, *fargs, **fkwargs):
        import rhodecode.lib.helpers as h
        cls = fargs[0]
        _user = cls._rhodecode_user

        log.debug('checking %s permissions %s for %s %s',
                  self.__class__.__name__, self.required_perms, cls, _user)

        if self.check_permissions(_user):
            log.debug('Permission granted for %s %s', cls, _user)
            return func(*fargs, **fkwargs)

        else:
            log.debug('Permission denied for %s %s', cls, _user)
            anonymous = _user.username == User.DEFAULT_USER

            if anonymous:
                came_from = self._get_came_from()
                h.flash(_('You need to be signed in to view this page'),
                        category='warning')
                raise HTTPFound(
                    h.route_path('login', _query={'came_from': came_from}))

            else:
                # respond with 404 to prevent resource discovery
                raise HTTPNotFound()

    def check_permissions(self, user):
        """Dummy method meant to be overridden in subclasses."""
        raise NotImplementedError(
            'You have to write this function in child class')


class HasPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates. All of them
    have to be met in order to fulfill the request
    """

    def check_permissions(self, user):
        perms = user.permissions_with_scope({})
        if self.required_perms.issubset(perms['global']):
            return True
        return False


class HasPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates. In order to
    fulfill the request any of the predicates must be met
    """

    def check_permissions(self, user):
        perms = user.permissions_with_scope({})
        if self.required_perms.intersection(perms['global']):
            return True
        return False


class HasRepoPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates for specific
    repository. All of them have to be met in order to fulfill the request
    """
    def _get_repo_name(self):
        _request = self._get_request()
        return get_repo_slug(_request)

    def check_permissions(self, user):
        perms = user.permissions
        repo_name = self._get_repo_name()

        try:
            user_perms = set([perms['repositories'][repo_name]])
        except KeyError:
            log.debug('cannot locate repo with name: `%s` in permissions defs',
                      repo_name)
            return False

        log.debug('checking `%s` permissions for repo `%s`',
                  user_perms, repo_name)
        if self.required_perms.issubset(user_perms):
            return True
        return False


class HasRepoPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates for specific
    repository. In order to fulfill the request any of the predicates must
    be met
    """
    def _get_repo_name(self):
        _request = self._get_request()
        return get_repo_slug(_request)

    def check_permissions(self, user):
        perms = user.permissions
        repo_name = self._get_repo_name()

        try:
            user_perms = set([perms['repositories'][repo_name]])
        except KeyError:
            log.debug('cannot locate repo with name: `%s` in permissions defs',
                      repo_name)
            return False

        log.debug('checking `%s` permissions for repo `%s`',
                  user_perms, repo_name)
        if self.required_perms.intersection(user_perms):
            return True
        return False


class HasRepoGroupPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates for specific
    repository group. All of them have to be met in order to
    fulfill the request
    """
    def _get_repo_group_name(self):
        _request = self._get_request()
        return get_repo_group_slug(_request)

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_repo_group_name()
        try:
            user_perms = set([perms['repositories_groups'][group_name]])
        except KeyError:
            log.debug('cannot locate repo group with name: `%s` in permissions defs',
                      group_name)
            return False

        log.debug('checking `%s` permissions for repo group `%s`',
                  user_perms, group_name)
        if self.required_perms.issubset(user_perms):
            return True
        return False


class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates for specific
    repository group. In order to fulfill the request any
    of the predicates must be met
    """
    def _get_repo_group_name(self):
        _request = self._get_request()
        return get_repo_group_slug(_request)

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_repo_group_name()

        try:
            user_perms = set([perms['repositories_groups'][group_name]])
        except KeyError:
            log.debug('cannot locate repo group with name: `%s` in permissions defs',
                      group_name)
            return False

        log.debug('checking `%s` permissions for repo group `%s`',
                  user_perms, group_name)
        if self.required_perms.intersection(user_perms):
            return True
        return False


class HasUserGroupPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates for specific
    user group. All of them have to be met in order to fulfill the request
    """
    def _get_user_group_name(self):
        _request = self._get_request()
        return get_user_group_slug(_request)

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_user_group_name()
        try:
            user_perms = set([perms['user_groups'][group_name]])
        except KeyError:
            return False

        if self.required_perms.issubset(user_perms):
            return True
        return False


class HasUserGroupPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates for specific
    user group. In order to fulfill the request any of the predicates must
    be met
    """
    def _get_user_group_name(self):
        _request = self._get_request()
        return get_user_group_slug(_request)

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_user_group_name()
        try:
            user_perms = set([perms['user_groups'][group_name]])
        except KeyError:
            return False

        if self.required_perms.intersection(user_perms):
            return True
        return False


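# Illustrative usage sketch (not part of the original module): the decorator
# subclasses above are applied to controller methods with permission keys.
# The keys shown here ('hg.admin', 'repository.read', ...) are the ones
# conventionally used by RhodeCode, but they are assumptions for this example.
#
#   class ReposController(BaseRepoController):
#
#       @HasPermissionAnyDecorator('hg.admin', 'hg.create.repository')
#       def create(self):
#           ...
#
#       @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
#                                      'repository.admin')
#       def show(self, repo_name):
#           ...
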
# CHECK FUNCTIONS
class PermsFunction(object):
    """Base function for other check functions"""

    def __init__(self, *perms):
        self.required_perms = set(perms)
        self.repo_name = None
        self.repo_group_name = None
        self.user_group_name = None

    def __bool__(self):
        frame = inspect.currentframe()
        stack_trace = traceback.format_stack(frame)
        log.error('Checking bool value on a class instance of perm '
                  'function is not allowed: %s' % ''.join(stack_trace))
        # rather than throwing errors, here we always return False so if by
        # accident someone checks truth for just an instance it will always
        # end up returning False
        return False
    __nonzero__ = __bool__

    def __call__(self, check_location='', user=None):
        if not user:
            log.debug('Using user attribute from global request')
            # TODO: remove this someday, pass user in as an attribute here
            request = self._get_request()
            user = request.user

        # init auth user if not already given
        if not isinstance(user, AuthUser):
            log.debug('Wrapping user %s into AuthUser', user)
            user = AuthUser(user.user_id)

        cls_name = self.__class__.__name__
        check_scope = self._get_check_scope(cls_name)
        check_location = check_location or 'unspecified location'

        log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
                  self.required_perms, user, check_scope, check_location)
        if not user:
            log.warning('Empty user given for permission check')
            return False

        if self.check_permissions(user):
            log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return True

        else:
            log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return False

    def _get_request(self):
        return get_request(self)

    def _get_check_scope(self, cls_name):
        return {
            'HasPermissionAll': 'GLOBAL',
            'HasPermissionAny': 'GLOBAL',
            'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
            'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
            'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
            'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
            'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
            'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
        }.get(cls_name, '?:%s' % cls_name)

    def check_permissions(self, user):
        """Dummy method meant to be overridden in subclasses."""
        raise Exception('You have to write this function in child class')


class HasPermissionAll(PermsFunction):
    def check_permissions(self, user):
        perms = user.permissions_with_scope({})
        if self.required_perms.issubset(perms.get('global')):
            return True
        return False


class HasPermissionAny(PermsFunction):
    def check_permissions(self, user):
        perms = user.permissions_with_scope({})
        if self.required_perms.intersection(perms.get('global')):
            return True
        return False


class HasRepoPermissionAll(PermsFunction):
    def __call__(self, repo_name=None, check_location='', user=None):
        self.repo_name = repo_name
        return super(HasRepoPermissionAll, self).__call__(check_location, user)

    def _get_repo_name(self):
        if not self.repo_name:
            _request = self._get_request()
            self.repo_name = get_repo_slug(_request)
        return self.repo_name

    def check_permissions(self, user):
        self.repo_name = self._get_repo_name()
        perms = user.permissions
        try:
            user_perms = set([perms['repositories'][self.repo_name]])
        except KeyError:
            return False
        if self.required_perms.issubset(user_perms):
            return True
        return False


class HasRepoPermissionAny(PermsFunction):
    def __call__(self, repo_name=None, check_location='', user=None):
        self.repo_name = repo_name
        return super(HasRepoPermissionAny, self).__call__(check_location, user)

    def _get_repo_name(self):
        if not self.repo_name:
            _request = self._get_request()
            self.repo_name = get_repo_slug(_request)
        return self.repo_name

    def check_permissions(self, user):
        self.repo_name = self._get_repo_name()
        perms = user.permissions
        try:
            user_perms = set([perms['repositories'][self.repo_name]])
        except KeyError:
            return False
        if self.required_perms.intersection(user_perms):
            return True
        return False


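# Illustrative usage sketch (not part of the original module): the check
# functions above are instantiated with permission keys and then *called*,
# e.g. from templates or views. The permission names and repo name are
# assumed values for this example.
#
#   can_push = HasRepoPermissionAny(
#       'repository.write', 'repository.admin')(
#           repo_name='some-repo', check_location='summary page')
#
# Note that __bool__ is deliberately overridden to log an error and return
# False, so truth-testing a bare instance (instead of calling it) is a bug
# this base class guards against.
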
class HasRepoGroupPermissionAny(PermsFunction):
    def __call__(self, group_name=None, check_location='', user=None):
        self.repo_group_name = group_name
        return super(HasRepoGroupPermissionAny, self).__call__(
            check_location, user)

    def check_permissions(self, user):
        perms = user.permissions
        try:
            user_perms = set(
                [perms['repositories_groups'][self.repo_group_name]])
        except KeyError:
            return False
        if self.required_perms.intersection(user_perms):
            return True
        return False


class HasRepoGroupPermissionAll(PermsFunction):
    def __call__(self, group_name=None, check_location='', user=None):
        self.repo_group_name = group_name
        return super(HasRepoGroupPermissionAll, self).__call__(
            check_location, user)

    def check_permissions(self, user):
        perms = user.permissions
        try:
            user_perms = set(
                [perms['repositories_groups'][self.repo_group_name]])
        except KeyError:
            return False
        if self.required_perms.issubset(user_perms):
            return True
        return False


class HasUserGroupPermissionAny(PermsFunction):
    def __call__(self, user_group_name=None, check_location='', user=None):
        self.user_group_name = user_group_name
        return super(HasUserGroupPermissionAny, self).__call__(
            check_location, user)

    def check_permissions(self, user):
        perms = user.permissions
        try:
            user_perms = set([perms['user_groups'][self.user_group_name]])
        except KeyError:
            return False
        if self.required_perms.intersection(user_perms):
            return True
        return False


class HasUserGroupPermissionAll(PermsFunction):
    def __call__(self, user_group_name=None, check_location='', user=None):
        self.user_group_name = user_group_name
        return super(HasUserGroupPermissionAll, self).__call__(
            check_location, user)

    def check_permissions(self, user):
        perms = user.permissions
        try:
            user_perms = set([perms['user_groups'][self.user_group_name]])
        except KeyError:
            return False
        if self.required_perms.issubset(user_perms):
            return True
        return False


# SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
class HasPermissionAnyMiddleware(object):
    def __init__(self, *perms):
        self.required_perms = set(perms)

    def __call__(self, user, repo_name):
        # repo_name MUST be unicode, since we handle keys in permission
        # dict by unicode
        repo_name = safe_unicode(repo_name)
        user = AuthUser(user.user_id)
        log.debug(
            'Checking VCS protocol permissions %s for user:%s repo:`%s`',
            self.required_perms, user, repo_name)

        if self.check_permissions(user, repo_name):
            log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
                      repo_name, user, 'PermissionMiddleware')
            return True

        else:
            log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
                      repo_name, user, 'PermissionMiddleware')
            return False

    def check_permissions(self, user, repo_name):
        perms = user.permissions_with_scope({'repo_name': repo_name})

        try:
            user_perms = set([perms['repositories'][repo_name]])
        except Exception:
            log.exception('Error while accessing user permissions')
            return False

        if self.required_perms.intersection(user_perms):
            return True
        return False


# SPECIAL VERSION TO HANDLE API AUTH
class _BaseApiPerm(object):
    def __init__(self, *perms):
        self.required_perms = set(perms)

    def __call__(self, check_location=None, user=None, repo_name=None,
                 group_name=None, user_group_name=None):
        cls_name = self.__class__.__name__
        check_scope = 'global:%s' % (self.required_perms,)
        if repo_name:
            check_scope += ', repo_name:%s' % (repo_name,)

        if group_name:
            check_scope += ', repo_group_name:%s' % (group_name,)

        if user_group_name:
            check_scope += ', user_group_name:%s' % (user_group_name,)

        log.debug(
            'checking cls:%s %s %s @ %s'
            % (cls_name, self.required_perms, check_scope, check_location))
        if not user:
            log.debug('Empty User passed into arguments')
            return False

        # process user
        if not isinstance(user, AuthUser):
            user = AuthUser(user.user_id)
        if not check_location:
            check_location = 'unspecified'
        if self.check_permissions(user.permissions, repo_name, group_name,
                                  user_group_name):
            log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return True

        else:
            log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return False

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        """
        Implement in a child class; should return True if permissions are ok,
        False otherwise.

        :param perm_defs: dict with permission definitions
        :param repo_name: repo name
        """
        raise NotImplementedError()


class HasPermissionAllApi(_BaseApiPerm):
    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        if self.required_perms.issubset(perm_defs.get('global')):
            return True
        return False


class HasPermissionAnyApi(_BaseApiPerm):
    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        if self.required_perms.intersection(perm_defs.get('global')):
            return True
        return False


class HasRepoPermissionAllApi(_BaseApiPerm):
    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            _user_perms = set([perm_defs['repositories'][repo_name]])
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        if self.required_perms.issubset(_user_perms):
            return True
        return False


class HasRepoPermissionAnyApi(_BaseApiPerm):
    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            _user_perms = set([perm_defs['repositories'][repo_name]])
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        if self.required_perms.intersection(_user_perms):
            return True
        return False


class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            _user_perms = set([perm_defs['repositories_groups'][group_name]])
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        if self.required_perms.intersection(_user_perms):
            return True
        return False


class HasRepoGroupPermissionAllApi(_BaseApiPerm):
    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            _user_perms = set([perm_defs['repositories_groups'][group_name]])
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        if self.required_perms.issubset(_user_perms):
            return True
        return False


class HasUserGroupPermissionAnyApi(_BaseApiPerm):
    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            _user_perms = set([perm_defs['user_groups'][user_group_name]])
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        if self.required_perms.intersection(_user_perms):
            return True
        return False


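# Illustrative usage sketch (not part of the original module): the *_Api
# classes take the pre-computed permission dict from the user instead of
# resolving it from the request, which suits API-style callers. The permission
# key, user variable and repo name below are assumed values for this example.
#
#   has_read = HasRepoPermissionAnyApi('repository.read')(
#       user=apiuser, repo_name='some-repo', check_location='api call')
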
def check_ip_access(source_ip, allowed_ips=None):
    """
    Checks if source_ip falls within any of the networks given in allowed_ips.

    :param source_ip: IP address to check
    :param allowed_ips: list of allowed IPs together with their netmask
    """
    log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips))
    source_ip_address = ipaddress.ip_address(source_ip)
    if isinstance(allowed_ips, (tuple, list, set)):
        for ip in allowed_ips:
            try:
                network_address = ipaddress.ip_network(ip, strict=False)
                if source_ip_address in network_address:
                    log.debug('IP %s is network %s' %
                              (source_ip_address, network_address))
                    return True
            # if we cannot determine the IP for any reason, don't crash;
            # skip it and log the error, we still want to respond with
            # forbidden when a bad IP is sent
            except Exception:
                log.error(traceback.format_exc())
                continue
    return False


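# Example (illustrative, not part of the original module): with the ipaddress
# semantics used above, a plain host entry is treated as a /32 (or /128)
# network, so the first two checks succeed while the last one fails. Unicode
# literals are used because the ipaddress backport expects unicode input.
#
#   check_ip_access(u'192.168.1.7', [u'192.168.1.0/24'])   # True
#   check_ip_access(u'192.168.1.7', [u'192.168.1.7'])      # True (host == /32)
#   check_ip_access(u'10.0.0.1', [u'192.168.1.0/24'])      # False
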
def get_cython_compat_decorator(wrapper, func):
    """
    Creates a cython compatible decorator. The previously used
    decorator.decorator() function seems to be incompatible with cython.

    :param wrapper: __wrapper method of the decorator class
    :param func: decorated function
    """
    @wraps(func)
    def local_wrapper(*args, **kwds):
        return wrapper(func, *args, **kwds)
    local_wrapper.__wrapped__ = func
    return local_wrapper


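# Minimal sketch (an assumption for illustration, not part of the original
# module) of the pattern the decorator classes above follow when delegating
# to get_cython_compat_decorator():
#
#   class ExampleDecorator(object):
#       def __call__(self, func):
#           return get_cython_compat_decorator(self.__wrapper, func)
#
#       def __wrapper(self, func, *fargs, **fkwargs):
#           # pre-checks go here; fargs[0] is the controller instance
#           return func(*fargs, **fkwargs)
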
@@ -1,617 +1,631 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
The base Controller API
Provides the BaseController class for subclassing and usage in different
controllers
"""

import logging
import socket

import ipaddress
import pyramid.threadlocal

from paste.auth.basic import AuthBasicAuthenticator
from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
from pylons import config, tmpl_context as c, request, url
from pylons.controllers import WSGIController
from pylons.controllers.util import redirect
from pylons.i18n import translation
# marcink: don't remove this import
from pylons.templating import render_mako as render  # noqa
from pylons.i18n.translation import _
from webob.exc import HTTPFound


import rhodecode
from rhodecode.authentication.base import VCS_TYPE
from rhodecode.lib import auth, utils2
from rhodecode.lib import helpers as h
from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
from rhodecode.lib.exceptions import UserCreationError
from rhodecode.lib.utils import (
    get_repo_slug, set_rhodecode_config, password_changed,
    get_enabled_hook_classes)
from rhodecode.lib.utils2 import (
    str2bool, safe_unicode, AttributeDict, safe_int, md5, aslist)
from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
from rhodecode.model import meta
from rhodecode.model.db import Repository, User, ChangesetComment
from rhodecode.model.notification import NotificationModel
from rhodecode.model.scm import ScmModel
from rhodecode.model.settings import VcsSettingsModel, SettingsModel


log = logging.getLogger(__name__)


68 def _filter_proxy(ip):
68 def _filter_proxy(ip):
69 """
69 """
70 IP addresses passed in HEADERS can be in a special format of multiple
70 IP addresses passed in HEADERS can be in a special format of multiple
71 IPs. Those comma-separated IPs are passed from various proxies in the
71 IPs. Those comma-separated IPs are passed from various proxies in the
72 chain of request processing; the left-most is the original client.
72 chain of request processing; the left-most is the original client.
73 We only care about the first IP, which came from the original client.
73 We only care about the first IP, which came from the original client.
74
74
75 :param ip: ip string from headers
75 :param ip: ip string from headers
76 """
76 """
77 if ',' in ip:
77 if ',' in ip:
78 _ips = ip.split(',')
78 _ips = ip.split(',')
79 _first_ip = _ips[0].strip()
79 _first_ip = _ips[0].strip()
80 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
80 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
81 return _first_ip
81 return _first_ip
82 return ip
82 return ip
83
83
84
84
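For illustration, a couple of doctest-style checks of the behaviour described above; this is a sketch only, assuming rhodecode.lib.base is importable and using made-up addresses (_filter_proxy is a module-private helper):

    from rhodecode.lib.base import _filter_proxy
    # the left-most entry of a comma-separated proxy chain is the original client
    assert _filter_proxy('203.0.113.7, 10.0.0.1, 10.0.0.2') == '203.0.113.7'
    # a plain single IP is returned unchanged
    assert _filter_proxy('203.0.113.7') == '203.0.113.7'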
85 def _filter_port(ip):
85 def _filter_port(ip):
86 """
86 """
87 Removes a port from an ip; there are 4 main cases to handle here.
87 Removes a port from an ip; there are 4 main cases to handle here.
88 - ipv4 eg. 127.0.0.1
88 - ipv4 eg. 127.0.0.1
89 - ipv6 eg. ::1
89 - ipv6 eg. ::1
90 - ipv4+port eg. 127.0.0.1:8080
90 - ipv4+port eg. 127.0.0.1:8080
91 - ipv6+port eg. [::1]:8080
91 - ipv6+port eg. [::1]:8080
92
92
93 :param ip:
93 :param ip:
94 """
94 """
95 def is_ipv6(ip_addr):
95 def is_ipv6(ip_addr):
96 if hasattr(socket, 'inet_pton'):
96 if hasattr(socket, 'inet_pton'):
97 try:
97 try:
98 socket.inet_pton(socket.AF_INET6, ip_addr)
98 socket.inet_pton(socket.AF_INET6, ip_addr)
99 except socket.error:
99 except socket.error:
100 return False
100 return False
101 else:
101 else:
102 # fallback to ipaddress
102 # fallback to ipaddress
103 try:
103 try:
104 ipaddress.IPv6Address(ip_addr)
104 ipaddress.IPv6Address(ip_addr)
105 except Exception:
105 except Exception:
106 return False
106 return False
107 return True
107 return True
108
108
109 if ':' not in ip: # must be ipv4 pure ip
109 if ':' not in ip: # must be ipv4 pure ip
110 return ip
110 return ip
111
111
112 if '[' in ip and ']' in ip: # ipv6 with port
112 if '[' in ip and ']' in ip: # ipv6 with port
113 return ip.split(']')[0][1:].lower()
113 return ip.split(']')[0][1:].lower()
114
114
115 # must be ipv6 or ipv4 with port
115 # must be ipv6 or ipv4 with port
116 if is_ipv6(ip):
116 if is_ipv6(ip):
117 return ip
117 return ip
118 else:
118 else:
119 ip, _port = ip.split(':')[:2] # means ipv4+port
119 ip, _port = ip.split(':')[:2] # means ipv4+port
120 return ip
120 return ip
121
121
122
122
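As a quick sanity check of the four cases listed in the docstring, the following illustrative assertions should hold (a sketch, again assuming rhodecode.lib.base is importable; addresses are made up):

    from rhodecode.lib.base import _filter_port
    assert _filter_port('127.0.0.1') == '127.0.0.1'        # ipv4, no port
    assert _filter_port('127.0.0.1:8080') == '127.0.0.1'   # ipv4 + port
    assert _filter_port('::1') == '::1'                    # plain ipv6
    assert _filter_port('[::1]:8080') == '::1'             # ipv6 + port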
123 def get_ip_addr(environ):
123 def get_ip_addr(environ):
124 proxy_key = 'HTTP_X_REAL_IP'
124 proxy_key = 'HTTP_X_REAL_IP'
125 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
125 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
126 def_key = 'REMOTE_ADDR'
126 def_key = 'REMOTE_ADDR'
127 _filters = lambda x: _filter_port(_filter_proxy(x))
127 _filters = lambda x: _filter_port(_filter_proxy(x))
128
128
129 ip = environ.get(proxy_key)
129 ip = environ.get(proxy_key)
130 if ip:
130 if ip:
131 return _filters(ip)
131 return _filters(ip)
132
132
133 ip = environ.get(proxy_key2)
133 ip = environ.get(proxy_key2)
134 if ip:
134 if ip:
135 return _filters(ip)
135 return _filters(ip)
136
136
137 ip = environ.get(def_key, '0.0.0.0')
137 ip = environ.get(def_key, '0.0.0.0')
138 return _filters(ip)
138 return _filters(ip)
139
139
140
140
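The precedence implemented above is X-Real-IP, then X-Forwarded-For, then REMOTE_ADDR. A small illustrative check with hypothetical addresses (assumes rhodecode.lib.base is importable):

    from rhodecode.lib.base import get_ip_addr
    environ = {
        'HTTP_X_FORWARDED_FOR': '203.0.113.7, 10.0.0.1',
        'REMOTE_ADDR': '10.0.0.2',
    }
    # the proxy header wins over REMOTE_ADDR, and only the left-most IP is kept
    assert get_ip_addr(environ) == '203.0.113.7'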
141 def get_server_ip_addr(environ, log_errors=True):
141 def get_server_ip_addr(environ, log_errors=True):
142 hostname = environ.get('SERVER_NAME')
142 hostname = environ.get('SERVER_NAME')
143 try:
143 try:
144 return socket.gethostbyname(hostname)
144 return socket.gethostbyname(hostname)
145 except Exception as e:
145 except Exception as e:
146 if log_errors:
146 if log_errors:
147 # in some cases this lookup is not possible, and we don't want to
147 # in some cases this lookup is not possible, and we don't want to
148 # make it an exception in logs
148 # make it an exception in logs
149 log.exception('Could not retrieve server ip address: %s', e)
149 log.exception('Could not retrieve server ip address: %s', e)
150 return hostname
150 return hostname
151
151
152
152
153 def get_server_port(environ):
153 def get_server_port(environ):
154 return environ.get('SERVER_PORT')
154 return environ.get('SERVER_PORT')
155
155
156
156
157 def get_access_path(environ):
157 def get_access_path(environ):
158 path = environ.get('PATH_INFO')
158 path = environ.get('PATH_INFO')
159 org_req = environ.get('pylons.original_request')
159 org_req = environ.get('pylons.original_request')
160 if org_req:
160 if org_req:
161 path = org_req.environ.get('PATH_INFO')
161 path = org_req.environ.get('PATH_INFO')
162 return path
162 return path
163
163
164
164
165 def get_user_agent(environ):
165 def get_user_agent(environ):
166 return environ.get('HTTP_USER_AGENT')
166 return environ.get('HTTP_USER_AGENT')
167
167
168
168
169 def vcs_operation_context(
169 def vcs_operation_context(
170 environ, repo_name, username, action, scm, check_locking=True,
170 environ, repo_name, username, action, scm, check_locking=True,
171 is_shadow_repo=False):
171 is_shadow_repo=False):
172 """
172 """
173 Generate the context for a vcs operation, e.g. push or pull.
173 Generate the context for a vcs operation, e.g. push or pull.
174
174
175 This context is passed over the layers so that hooks triggered by the
175 This context is passed over the layers so that hooks triggered by the
176 vcs operation know details like the user, the user's IP address etc.
176 vcs operation know details like the user, the user's IP address etc.
177
177
178 :param check_locking: Allows to switch off the computation of the locking
178 :param check_locking: Allows to switch off the computation of the locking
179 data. This serves mainly the need of the simplevcs middleware to be
179 data. This serves mainly the need of the simplevcs middleware to be
180 able to disable this for certain operations.
180 able to disable this for certain operations.
181
181
182 """
182 """
183 # Tri-state value: False: unlock, None: nothing, True: lock
183 # Tri-state value: False: unlock, None: nothing, True: lock
184 make_lock = None
184 make_lock = None
185 locked_by = [None, None, None]
185 locked_by = [None, None, None]
186 is_anonymous = username == User.DEFAULT_USER
186 is_anonymous = username == User.DEFAULT_USER
187 if not is_anonymous and check_locking:
187 if not is_anonymous and check_locking:
188 log.debug('Checking locking on repository "%s"', repo_name)
188 log.debug('Checking locking on repository "%s"', repo_name)
189 user = User.get_by_username(username)
189 user = User.get_by_username(username)
190 repo = Repository.get_by_repo_name(repo_name)
190 repo = Repository.get_by_repo_name(repo_name)
191 make_lock, __, locked_by = repo.get_locking_state(
191 make_lock, __, locked_by = repo.get_locking_state(
192 action, user.user_id)
192 action, user.user_id)
193
193
194 settings_model = VcsSettingsModel(repo=repo_name)
194 settings_model = VcsSettingsModel(repo=repo_name)
195 ui_settings = settings_model.get_ui_settings()
195 ui_settings = settings_model.get_ui_settings()
196
196
197 extras = {
197 extras = {
198 'ip': get_ip_addr(environ),
198 'ip': get_ip_addr(environ),
199 'username': username,
199 'username': username,
200 'action': action,
200 'action': action,
201 'repository': repo_name,
201 'repository': repo_name,
202 'scm': scm,
202 'scm': scm,
203 'config': rhodecode.CONFIG['__file__'],
203 'config': rhodecode.CONFIG['__file__'],
204 'make_lock': make_lock,
204 'make_lock': make_lock,
205 'locked_by': locked_by,
205 'locked_by': locked_by,
206 'server_url': utils2.get_server_url(environ),
206 'server_url': utils2.get_server_url(environ),
207 'user_agent': get_user_agent(environ),
207 'user_agent': get_user_agent(environ),
208 'hooks': get_enabled_hook_classes(ui_settings),
208 'hooks': get_enabled_hook_classes(ui_settings),
209 'is_shadow_repo': is_shadow_repo,
209 'is_shadow_repo': is_shadow_repo,
210 }
210 }
211 return extras
211 return extras
212
212
213
213
214 class BasicAuth(AuthBasicAuthenticator):
214 class BasicAuth(AuthBasicAuthenticator):
215
215
216 def __init__(self, realm, authfunc, registry, auth_http_code=None,
216 def __init__(self, realm, authfunc, registry, auth_http_code=None,
217 initial_call_detection=False, acl_repo_name=None):
217 initial_call_detection=False, acl_repo_name=None):
218 self.realm = realm
218 self.realm = realm
219 self.initial_call = initial_call_detection
219 self.initial_call = initial_call_detection
220 self.authfunc = authfunc
220 self.authfunc = authfunc
221 self.registry = registry
221 self.registry = registry
222 self.acl_repo_name = acl_repo_name
222 self.acl_repo_name = acl_repo_name
223 self._rc_auth_http_code = auth_http_code
223 self._rc_auth_http_code = auth_http_code
224
224
225 def _get_response_from_code(self, http_code):
225 def _get_response_from_code(self, http_code):
226 try:
226 try:
227 return get_exception(safe_int(http_code))
227 return get_exception(safe_int(http_code))
228 except Exception:
228 except Exception:
229 log.exception('Failed to fetch response for code %s' % http_code)
229 log.exception('Failed to fetch response for code %s' % http_code)
230 return HTTPForbidden
230 return HTTPForbidden
231
231
232 def build_authentication(self):
232 def build_authentication(self):
233 head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
233 head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
234 if self._rc_auth_http_code and not self.initial_call:
234 if self._rc_auth_http_code and not self.initial_call:
235 # return alternative HTTP code if alternative http return code
235 # return alternative HTTP code if alternative http return code
236 # is specified in RhodeCode config, but ONLY if it's not the
236 # is specified in RhodeCode config, but ONLY if it's not the
237 # FIRST call
237 # FIRST call
238 custom_response_klass = self._get_response_from_code(
238 custom_response_klass = self._get_response_from_code(
239 self._rc_auth_http_code)
239 self._rc_auth_http_code)
240 return custom_response_klass(headers=head)
240 return custom_response_klass(headers=head)
241 return HTTPUnauthorized(headers=head)
241 return HTTPUnauthorized(headers=head)
242
242
243 def authenticate(self, environ):
243 def authenticate(self, environ):
244 authorization = AUTHORIZATION(environ)
244 authorization = AUTHORIZATION(environ)
245 if not authorization:
245 if not authorization:
246 return self.build_authentication()
246 return self.build_authentication()
247 (authmeth, auth) = authorization.split(' ', 1)
247 (authmeth, auth) = authorization.split(' ', 1)
248 if 'basic' != authmeth.lower():
248 if 'basic' != authmeth.lower():
249 return self.build_authentication()
249 return self.build_authentication()
250 auth = auth.strip().decode('base64')
250 auth = auth.strip().decode('base64')
251 _parts = auth.split(':', 1)
251 _parts = auth.split(':', 1)
252 if len(_parts) == 2:
252 if len(_parts) == 2:
253 username, password = _parts
253 username, password = _parts
254 if self.authfunc(
254 if self.authfunc(
255 username, password, environ, VCS_TYPE,
255 username, password, environ, VCS_TYPE,
256 registry=self.registry, acl_repo_name=self.acl_repo_name):
256 registry=self.registry, acl_repo_name=self.acl_repo_name):
257 return username
257 return username
258 if username and password:
258 if username and password:
259 # we mark that we actually executed authentication once, at
259 # we mark that we actually executed authentication once, at
260 # that point we can use the alternative auth code
260 # that point we can use the alternative auth code
261 self.initial_call = False
261 self.initial_call = False
262
262
263 return self.build_authentication()
263 return self.build_authentication()
264
264
265 __call__ = authenticate
265 __call__ = authenticate
266
266
267
267
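A hedged sketch of the header format authenticate() above expects, written in Python 2 to match this module; the credentials are made up and this is not part of the changeset itself:

    import base64
    authorization = 'Basic ' + base64.b64encode('someuser:somepass')
    authmeth, auth = authorization.split(' ', 1)
    assert authmeth.lower() == 'basic'
    username, password = auth.strip().decode('base64').split(':', 1)
    assert (username, password) == ('someuser', 'somepass')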
268 def calculate_version_hash():
268 def calculate_version_hash():
269 return md5(
269 return md5(
270 config.get('beaker.session.secret', '') +
270 config.get('beaker.session.secret', '') +
271 rhodecode.__version__)[:8]
271 rhodecode.__version__)[:8]
272
272
273
273
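Assuming utils2.md5 returns the hex digest of its input, the hash above is equivalent to this stand-alone computation (secret and version values are hypothetical):

    import hashlib
    version_hash = hashlib.md5('beaker-secret' + '4.9.0').hexdigest()[:8]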
274 def get_current_lang(request):
274 def get_current_lang(request):
275 # NOTE(marcink): remove after pyramid move
275 # NOTE(marcink): remove after pyramid move
276 try:
276 try:
277 return translation.get_lang()[0]
277 return translation.get_lang()[0]
278 except:
278 except:
279 pass
279 pass
280
280
281 return getattr(request, '_LOCALE_', None)
281 return getattr(request, '_LOCALE_', None)
282
282
283
283
284 def attach_context_attributes(context, request, user_id):
284 def attach_context_attributes(context, request, user_id):
285 """
285 """
286 Attach variables to the template context called `c`; please note that the
286 Attach variables to the template context called `c`; please note that the
287 request could be a pylons or a pyramid request here.
287 request could be a pylons or a pyramid request here.
288 """
288 """
289 rc_config = SettingsModel().get_all_settings(cache=True)
289 rc_config = SettingsModel().get_all_settings(cache=True)
290
290
291 context.rhodecode_version = rhodecode.__version__
291 context.rhodecode_version = rhodecode.__version__
292 context.rhodecode_edition = config.get('rhodecode.edition')
292 context.rhodecode_edition = config.get('rhodecode.edition')
293 # unique secret + version does not leak the version but keeps consistency
293 # unique secret + version does not leak the version but keeps consistency
294 context.rhodecode_version_hash = calculate_version_hash()
294 context.rhodecode_version_hash = calculate_version_hash()
295
295
296 # Default language set for the incoming request
296 # Default language set for the incoming request
297 context.language = get_current_lang(request)
297 context.language = get_current_lang(request)
298
298
299 # Visual options
299 # Visual options
300 context.visual = AttributeDict({})
300 context.visual = AttributeDict({})
301
301
302 # DB stored Visual Items
302 # DB stored Visual Items
303 context.visual.show_public_icon = str2bool(
303 context.visual.show_public_icon = str2bool(
304 rc_config.get('rhodecode_show_public_icon'))
304 rc_config.get('rhodecode_show_public_icon'))
305 context.visual.show_private_icon = str2bool(
305 context.visual.show_private_icon = str2bool(
306 rc_config.get('rhodecode_show_private_icon'))
306 rc_config.get('rhodecode_show_private_icon'))
307 context.visual.stylify_metatags = str2bool(
307 context.visual.stylify_metatags = str2bool(
308 rc_config.get('rhodecode_stylify_metatags'))
308 rc_config.get('rhodecode_stylify_metatags'))
309 context.visual.dashboard_items = safe_int(
309 context.visual.dashboard_items = safe_int(
310 rc_config.get('rhodecode_dashboard_items', 100))
310 rc_config.get('rhodecode_dashboard_items', 100))
311 context.visual.admin_grid_items = safe_int(
311 context.visual.admin_grid_items = safe_int(
312 rc_config.get('rhodecode_admin_grid_items', 100))
312 rc_config.get('rhodecode_admin_grid_items', 100))
313 context.visual.repository_fields = str2bool(
313 context.visual.repository_fields = str2bool(
314 rc_config.get('rhodecode_repository_fields'))
314 rc_config.get('rhodecode_repository_fields'))
315 context.visual.show_version = str2bool(
315 context.visual.show_version = str2bool(
316 rc_config.get('rhodecode_show_version'))
316 rc_config.get('rhodecode_show_version'))
317 context.visual.use_gravatar = str2bool(
317 context.visual.use_gravatar = str2bool(
318 rc_config.get('rhodecode_use_gravatar'))
318 rc_config.get('rhodecode_use_gravatar'))
319 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
319 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
320 context.visual.default_renderer = rc_config.get(
320 context.visual.default_renderer = rc_config.get(
321 'rhodecode_markup_renderer', 'rst')
321 'rhodecode_markup_renderer', 'rst')
322 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
322 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
323 context.visual.rhodecode_support_url = \
323 context.visual.rhodecode_support_url = \
324 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
324 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
325
325
326 context.pre_code = rc_config.get('rhodecode_pre_code')
326 context.pre_code = rc_config.get('rhodecode_pre_code')
327 context.post_code = rc_config.get('rhodecode_post_code')
327 context.post_code = rc_config.get('rhodecode_post_code')
328 context.rhodecode_name = rc_config.get('rhodecode_title')
328 context.rhodecode_name = rc_config.get('rhodecode_title')
329 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
329 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
330 # if default_encoding is specified in the request, it takes
330 # if default_encoding is specified in the request, it takes
331 # priority
331 # priority
332 if request.GET.get('default_encoding'):
332 if request.GET.get('default_encoding'):
333 context.default_encodings.insert(0, request.GET.get('default_encoding'))
333 context.default_encodings.insert(0, request.GET.get('default_encoding'))
334 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
334 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
335
335
336 # INI stored
336 # INI stored
337 context.labs_active = str2bool(
337 context.labs_active = str2bool(
338 config.get('labs_settings_active', 'false'))
338 config.get('labs_settings_active', 'false'))
339 context.visual.allow_repo_location_change = str2bool(
339 context.visual.allow_repo_location_change = str2bool(
340 config.get('allow_repo_location_change', True))
340 config.get('allow_repo_location_change', True))
341 context.visual.allow_custom_hooks_settings = str2bool(
341 context.visual.allow_custom_hooks_settings = str2bool(
342 config.get('allow_custom_hooks_settings', True))
342 config.get('allow_custom_hooks_settings', True))
343 context.debug_style = str2bool(config.get('debug_style', False))
343 context.debug_style = str2bool(config.get('debug_style', False))
344
344
345 context.rhodecode_instanceid = config.get('instance_id')
345 context.rhodecode_instanceid = config.get('instance_id')
346
346
347 context.visual.cut_off_limit_diff = safe_int(
347 context.visual.cut_off_limit_diff = safe_int(
348 config.get('cut_off_limit_diff'))
348 config.get('cut_off_limit_diff'))
349 context.visual.cut_off_limit_file = safe_int(
349 context.visual.cut_off_limit_file = safe_int(
350 config.get('cut_off_limit_file'))
350 config.get('cut_off_limit_file'))
351
351
352 # AppEnlight
352 # AppEnlight
353 context.appenlight_enabled = str2bool(config.get('appenlight', 'false'))
353 context.appenlight_enabled = str2bool(config.get('appenlight', 'false'))
354 context.appenlight_api_public_key = config.get(
354 context.appenlight_api_public_key = config.get(
355 'appenlight.api_public_key', '')
355 'appenlight.api_public_key', '')
356 context.appenlight_server_url = config.get('appenlight.server_url', '')
356 context.appenlight_server_url = config.get('appenlight.server_url', '')
357
357
358 # JS template context
358 # JS template context
359 context.template_context = {
359 context.template_context = {
360 'repo_name': None,
360 'repo_name': None,
361 'repo_type': None,
361 'repo_type': None,
362 'repo_landing_commit': None,
362 'repo_landing_commit': None,
363 'rhodecode_user': {
363 'rhodecode_user': {
364 'username': None,
364 'username': None,
365 'email': None,
365 'email': None,
366 'notification_status': False
366 'notification_status': False
367 },
367 },
368 'visual': {
368 'visual': {
369 'default_renderer': None
369 'default_renderer': None
370 },
370 },
371 'commit_data': {
371 'commit_data': {
372 'commit_id': None
372 'commit_id': None
373 },
373 },
374 'pull_request_data': {'pull_request_id': None},
374 'pull_request_data': {'pull_request_id': None},
375 'timeago': {
375 'timeago': {
376 'refresh_time': 120 * 1000,
376 'refresh_time': 120 * 1000,
377 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
377 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
378 },
378 },
379 'pylons_dispatch': {
379 'pylons_dispatch': {
380 # 'controller': request.environ['pylons.routes_dict']['controller'],
380 # 'controller': request.environ['pylons.routes_dict']['controller'],
381 # 'action': request.environ['pylons.routes_dict']['action'],
381 # 'action': request.environ['pylons.routes_dict']['action'],
382 },
382 },
383 'pyramid_dispatch': {
383 'pyramid_dispatch': {
384
384
385 },
385 },
386 'extra': {'plugins': {}}
386 'extra': {'plugins': {}}
387 }
387 }
388 # END CONFIG VARS
388 # END CONFIG VARS
389
389
390 # TODO: This doesn't work when called from the pylons compatibility tween.
390 # TODO: This doesn't work when called from the pylons compatibility tween.
391 # Fix this and remove it from base controller.
391 # Fix this and remove it from base controller.
392 # context.repo_name = get_repo_slug(request) # can be empty
392 # context.repo_name = get_repo_slug(request) # can be empty
393
393
394 diffmode = 'sideside'
394 diffmode = 'sideside'
395 if request.GET.get('diffmode'):
395 if request.GET.get('diffmode'):
396 if request.GET['diffmode'] == 'unified':
396 if request.GET['diffmode'] == 'unified':
397 diffmode = 'unified'
397 diffmode = 'unified'
398 elif request.session.get('diffmode'):
398 elif request.session.get('diffmode'):
399 diffmode = request.session['diffmode']
399 diffmode = request.session['diffmode']
400
400
401 context.diffmode = diffmode
401 context.diffmode = diffmode
402
402
403 if request.session.get('diffmode') != diffmode:
403 if request.session.get('diffmode') != diffmode:
404 request.session['diffmode'] = diffmode
404 request.session['diffmode'] = diffmode
405
405
406 context.csrf_token = auth.get_csrf_token()
406 context.csrf_token = auth.get_csrf_token(session=request.session)
407 context.backends = rhodecode.BACKENDS.keys()
407 context.backends = rhodecode.BACKENDS.keys()
408 context.backends.sort()
408 context.backends.sort()
409 context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id)
409 context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id)
410 context.pyramid_request = pyramid.threadlocal.get_current_request()
410
411 # NOTE(marcink): when migrated to pyramid we don't need to set this anymore,
412 # given request will ALWAYS be pyramid one
413 pyramid_request = pyramid.threadlocal.get_current_request()
414 context.pyramid_request = pyramid_request
415
416 # web case
417 if hasattr(pyramid_request, 'user'):
418 context.auth_user = pyramid_request.user
419 context.rhodecode_user = pyramid_request.user
420
421 # api case
422 if hasattr(pyramid_request, 'rpc_user'):
423 context.auth_user = pyramid_request.rpc_user
424 context.rhodecode_user = pyramid_request.rpc_user
411
425
412 # attach the whole call context to the request
426 # attach the whole call context to the request
413 request.call_context = context
427 request.call_context = context
414
428
415
429
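For orientation, the pylons-compatibility subscriber later in this changeset calls attach_context_attributes roughly as follows (excerpt-style sketch; it assumes a live request with request.user already set):

    from pylons.util import ContextObj
    context = ContextObj()                  # the pylons template context ('c')
    context.rhodecode_user = request.user
    attach_context_attributes(context, request, request.user.user_id)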
416 def get_auth_user(request):
430 def get_auth_user(request):
417 environ = request.environ
431 environ = request.environ
418 session = request.session
432 session = request.session
419
433
420 ip_addr = get_ip_addr(environ)
434 ip_addr = get_ip_addr(environ)
421 # make sure that we update permissions each time we call controller
435 # make sure that we update permissions each time we call controller
422 _auth_token = (request.GET.get('auth_token', '') or
436 _auth_token = (request.GET.get('auth_token', '') or
423 request.GET.get('api_key', ''))
437 request.GET.get('api_key', ''))
424
438
425 if _auth_token:
439 if _auth_token:
426 # when using API_KEY we assume user exists, and
440 # when using API_KEY we assume user exists, and
427 # doesn't need auth based on cookies.
441 # doesn't need auth based on cookies.
428 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
442 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
429 authenticated = False
443 authenticated = False
430 else:
444 else:
431 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
445 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
432 try:
446 try:
433 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
447 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
434 ip_addr=ip_addr)
448 ip_addr=ip_addr)
435 except UserCreationError as e:
449 except UserCreationError as e:
436 h.flash(e, 'error')
450 h.flash(e, 'error')
437 # container auth or other auth functions that create users
451 # container auth or other auth functions that create users
438 # on the fly can throw this exception, signaling that there's an
452 # on the fly can throw this exception, signaling that there's an
439 # issue with user creation; an explanation should be provided
453 # issue with user creation; an explanation should be provided
440 # in the Exception itself. We then create a simple blank
454 # in the Exception itself. We then create a simple blank
441 # AuthUser
455 # AuthUser
442 auth_user = AuthUser(ip_addr=ip_addr)
456 auth_user = AuthUser(ip_addr=ip_addr)
443
457
444 if password_changed(auth_user, session):
458 if password_changed(auth_user, session):
445 session.invalidate()
459 session.invalidate()
446 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
460 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
447 auth_user = AuthUser(ip_addr=ip_addr)
461 auth_user = AuthUser(ip_addr=ip_addr)
448
462
449 authenticated = cookie_store.get('is_authenticated')
463 authenticated = cookie_store.get('is_authenticated')
450
464
451 if not auth_user.is_authenticated and auth_user.is_user_object:
465 if not auth_user.is_authenticated and auth_user.is_user_object:
452 # user is not authenticated and not empty
466 # user is not authenticated and not empty
453 auth_user.set_authenticated(authenticated)
467 auth_user.set_authenticated(authenticated)
454
468
455 return auth_user
469 return auth_user
456
470
457
471
458 class BaseController(WSGIController):
472 class BaseController(WSGIController):
459
473
460 def __before__(self):
474 def __before__(self):
461 """
475 """
462 __before__ is called before controller methods and after __call__
476 __before__ is called before controller methods and after __call__
463 """
477 """
464 # on each call propagate settings calls into global settings.
478 # on each call propagate settings calls into global settings.
465 set_rhodecode_config(config)
479 set_rhodecode_config(config)
466 attach_context_attributes(c, request, c.rhodecode_user.user_id)
480 attach_context_attributes(c, request, self._rhodecode_user.user_id)
467
481
468 # TODO: Remove this when fixed in attach_context_attributes()
482 # TODO: Remove this when fixed in attach_context_attributes()
469 c.repo_name = get_repo_slug(request) # can be empty
483 c.repo_name = get_repo_slug(request) # can be empty
470
484
471 self.cut_off_limit_diff = safe_int(config.get('cut_off_limit_diff'))
485 self.cut_off_limit_diff = safe_int(config.get('cut_off_limit_diff'))
472 self.cut_off_limit_file = safe_int(config.get('cut_off_limit_file'))
486 self.cut_off_limit_file = safe_int(config.get('cut_off_limit_file'))
473 self.sa = meta.Session
487 self.sa = meta.Session
474 self.scm_model = ScmModel(self.sa)
488 self.scm_model = ScmModel(self.sa)
475
489
476 # set user language
490 # set user language
477 user_lang = getattr(c.pyramid_request, '_LOCALE_', None)
491 user_lang = getattr(c.pyramid_request, '_LOCALE_', None)
478 if user_lang:
492 if user_lang:
479 translation.set_lang(user_lang)
493 translation.set_lang(user_lang)
480 log.debug('set language to %s for user %s',
494 log.debug('set language to %s for user %s',
481 user_lang, self._rhodecode_user)
495 user_lang, self._rhodecode_user)
482
496
483 def _dispatch_redirect(self, with_url, environ, start_response):
497 def _dispatch_redirect(self, with_url, environ, start_response):
484 resp = HTTPFound(with_url)
498 resp = HTTPFound(with_url)
485 environ['SCRIPT_NAME'] = '' # handle prefix middleware
499 environ['SCRIPT_NAME'] = '' # handle prefix middleware
486 environ['PATH_INFO'] = with_url
500 environ['PATH_INFO'] = with_url
487 return resp(environ, start_response)
501 return resp(environ, start_response)
488
502
489 def __call__(self, environ, start_response):
503 def __call__(self, environ, start_response):
490 """Invoke the Controller"""
504 """Invoke the Controller"""
491 # WSGIController.__call__ dispatches to the Controller method
505 # WSGIController.__call__ dispatches to the Controller method
492 # the request is routed to. This routing information is
506 # the request is routed to. This routing information is
493 # available in environ['pylons.routes_dict']
507 # available in environ['pylons.routes_dict']
494 from rhodecode.lib import helpers as h
508 from rhodecode.lib import helpers as h
495
509
496 # Provide the Pylons context to Pyramid's debugtoolbar if it asks
510 # Provide the Pylons context to Pyramid's debugtoolbar if it asks
497 if environ.get('debugtoolbar.wants_pylons_context', False):
511 if environ.get('debugtoolbar.wants_pylons_context', False):
498 environ['debugtoolbar.pylons_context'] = c._current_obj()
512 environ['debugtoolbar.pylons_context'] = c._current_obj()
499
513
500 _route_name = '.'.join([environ['pylons.routes_dict']['controller'],
514 _route_name = '.'.join([environ['pylons.routes_dict']['controller'],
501 environ['pylons.routes_dict']['action']])
515 environ['pylons.routes_dict']['action']])
502
516
503 self.rc_config = SettingsModel().get_all_settings(cache=True)
517 self.rc_config = SettingsModel().get_all_settings(cache=True)
504 self.ip_addr = get_ip_addr(environ)
518 self.ip_addr = get_ip_addr(environ)
505
519
506 # The rhodecode auth user is looked up and passed through the
520 # The rhodecode auth user is looked up and passed through the
507 # environ by the pylons compatibility tween in pyramid.
521 # environ by the pylons compatibility tween in pyramid.
508 # So we can just grab it from there.
522 # So we can just grab it from there.
509 auth_user = environ['rc_auth_user']
523 auth_user = environ['rc_auth_user']
510
524
511 # set globals for auth user
525 # set globals for auth user
512 request.user = auth_user
526 request.user = auth_user
513 c.rhodecode_user = self._rhodecode_user = auth_user
527 self._rhodecode_user = auth_user
514
528
515 log.info('IP: %s User: %s accessed %s [%s]' % (
529 log.info('IP: %s User: %s accessed %s [%s]' % (
516 self.ip_addr, auth_user, safe_unicode(get_access_path(environ)),
530 self.ip_addr, auth_user, safe_unicode(get_access_path(environ)),
517 _route_name)
531 _route_name)
518 )
532 )
519
533
520 user_obj = auth_user.get_instance()
534 user_obj = auth_user.get_instance()
521 if user_obj and user_obj.user_data.get('force_password_change'):
535 if user_obj and user_obj.user_data.get('force_password_change'):
522 h.flash('You are required to change your password', 'warning',
536 h.flash('You are required to change your password', 'warning',
523 ignore_duplicate=True)
537 ignore_duplicate=True)
524 return self._dispatch_redirect(
538 return self._dispatch_redirect(
525 url('my_account_password'), environ, start_response)
539 url('my_account_password'), environ, start_response)
526
540
527 return WSGIController.__call__(self, environ, start_response)
541 return WSGIController.__call__(self, environ, start_response)
528
542
529
543
530 class BaseRepoController(BaseController):
544 class BaseRepoController(BaseController):
531 """
545 """
532 Base class for controllers responsible for loading all needed data for a
546 Base class for controllers responsible for loading all needed data for a
533 repository. Loaded items are:
547 repository. Loaded items are:
534
548
535 c.rhodecode_repo: instance of scm repository
549 c.rhodecode_repo: instance of scm repository
536 c.rhodecode_db_repo: database model instance of the repository
550 c.rhodecode_db_repo: database model instance of the repository
537 c.repository_requirements_missing: shows that repository-specific data
551 c.repository_requirements_missing: shows that repository-specific data
538 could not be displayed due to missing requirements
552 could not be displayed due to missing requirements
539 c.repository_pull_requests: shows the number of open pull requests
553 c.repository_pull_requests: shows the number of open pull requests
540 """
554 """
541
555
542 def __before__(self):
556 def __before__(self):
543 super(BaseRepoController, self).__before__()
557 super(BaseRepoController, self).__before__()
544 if c.repo_name: # extracted from routes
558 if c.repo_name: # extracted from routes
545 db_repo = Repository.get_by_repo_name(c.repo_name)
559 db_repo = Repository.get_by_repo_name(c.repo_name)
546 if not db_repo:
560 if not db_repo:
547 return
561 return
548
562
549 log.debug(
563 log.debug(
550 'Found repository in database %s with state `%s`',
564 'Found repository in database %s with state `%s`',
551 safe_unicode(db_repo), safe_unicode(db_repo.repo_state))
565 safe_unicode(db_repo), safe_unicode(db_repo.repo_state))
552 route = getattr(request.environ.get('routes.route'), 'name', '')
566 route = getattr(request.environ.get('routes.route'), 'name', '')
553
567
554 # allow deleting repos that are somehow damaged in the filesystem
568 # allow deleting repos that are somehow damaged in the filesystem
555 if route in ['delete_repo']:
569 if route in ['delete_repo']:
556 return
570 return
557
571
558 if db_repo.repo_state in [Repository.STATE_PENDING]:
572 if db_repo.repo_state in [Repository.STATE_PENDING]:
559 if route in ['repo_creating_home']:
573 if route in ['repo_creating_home']:
560 return
574 return
561 check_url = url('repo_creating_home', repo_name=c.repo_name)
575 check_url = url('repo_creating_home', repo_name=c.repo_name)
562 return redirect(check_url)
576 return redirect(check_url)
563
577
564 self.rhodecode_db_repo = db_repo
578 self.rhodecode_db_repo = db_repo
565
579
566 missing_requirements = False
580 missing_requirements = False
567 try:
581 try:
568 self.rhodecode_repo = self.rhodecode_db_repo.scm_instance()
582 self.rhodecode_repo = self.rhodecode_db_repo.scm_instance()
569 except RepositoryRequirementError as e:
583 except RepositoryRequirementError as e:
570 missing_requirements = True
584 missing_requirements = True
571 self._handle_missing_requirements(e)
585 self._handle_missing_requirements(e)
572
586
573 if self.rhodecode_repo is None and not missing_requirements:
587 if self.rhodecode_repo is None and not missing_requirements:
574 log.error('%s this repository is present in database but it '
588 log.error('%s this repository is present in database but it '
575 'cannot be created as an scm instance', c.repo_name)
589 'cannot be created as an scm instance', c.repo_name)
576
590
577 h.flash(_(
591 h.flash(_(
578 "The repository at %(repo_name)s cannot be located.") %
592 "The repository at %(repo_name)s cannot be located.") %
579 {'repo_name': c.repo_name},
593 {'repo_name': c.repo_name},
580 category='error', ignore_duplicate=True)
594 category='error', ignore_duplicate=True)
581 redirect(h.route_path('home'))
595 redirect(h.route_path('home'))
582
596
583 # update last change according to VCS data
597 # update last change according to VCS data
584 if not missing_requirements:
598 if not missing_requirements:
585 commit = db_repo.get_commit(
599 commit = db_repo.get_commit(
586 pre_load=["author", "date", "message", "parents"])
600 pre_load=["author", "date", "message", "parents"])
587 db_repo.update_commit_cache(commit)
601 db_repo.update_commit_cache(commit)
588
602
589 # Prepare context
603 # Prepare context
590 c.rhodecode_db_repo = db_repo
604 c.rhodecode_db_repo = db_repo
591 c.rhodecode_repo = self.rhodecode_repo
605 c.rhodecode_repo = self.rhodecode_repo
592 c.repository_requirements_missing = missing_requirements
606 c.repository_requirements_missing = missing_requirements
593
607
594 self._update_global_counters(self.scm_model, db_repo)
608 self._update_global_counters(self.scm_model, db_repo)
595
609
596 def _update_global_counters(self, scm_model, db_repo):
610 def _update_global_counters(self, scm_model, db_repo):
597 """
611 """
598 Base variables that are exposed to every page of a repository
612 Base variables that are exposed to every page of a repository
599 """
613 """
600 c.repository_pull_requests = scm_model.get_pull_requests(db_repo)
614 c.repository_pull_requests = scm_model.get_pull_requests(db_repo)
601
615
602 def _handle_missing_requirements(self, error):
616 def _handle_missing_requirements(self, error):
603 self.rhodecode_repo = None
617 self.rhodecode_repo = None
604 log.error(
618 log.error(
605 'Requirements are missing for repository %s: %s',
619 'Requirements are missing for repository %s: %s',
606 c.repo_name, error.message)
620 c.repo_name, error.message)
607
621
608 summary_url = h.route_path('repo_summary', repo_name=c.repo_name)
622 summary_url = h.route_path('repo_summary', repo_name=c.repo_name)
609 statistics_url = url('edit_repo_statistics', repo_name=c.repo_name)
623 statistics_url = url('edit_repo_statistics', repo_name=c.repo_name)
610 settings_update_url = url('repo', repo_name=c.repo_name)
624 settings_update_url = url('repo', repo_name=c.repo_name)
611 path = request.path
625 path = request.path
612 should_redirect = (
626 should_redirect = (
613 path not in (summary_url, settings_update_url)
627 path not in (summary_url, settings_update_url)
614 and '/settings' not in path or path == statistics_url
628 and '/settings' not in path or path == statistics_url
615 )
629 )
616 if should_redirect:
630 if should_redirect:
617 redirect(summary_url)
631 redirect(summary_url)
@@ -1,329 +1,331 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import io
20 import io
21 import re
21 import re
22 import datetime
22 import datetime
23 import logging
23 import logging
24 import pylons
24 import pylons
25 import Queue
25 import Queue
26 import subprocess32
26 import subprocess32
27 import os
27 import os
28
28
29 from pyramid.i18n import get_localizer
29 from pyramid.i18n import get_localizer
30 from pyramid.threadlocal import get_current_request
30 from pyramid.threadlocal import get_current_request
31 from pyramid.interfaces import IRoutesMapper
31 from pyramid.interfaces import IRoutesMapper
32 from pyramid.settings import asbool
32 from pyramid.settings import asbool
33 from pyramid.path import AssetResolver
33 from pyramid.path import AssetResolver
34 from threading import Thread
34 from threading import Thread
35
35
36 from rhodecode.translation import _ as tsf
36 from rhodecode.translation import _ as tsf
37 from rhodecode.config.jsroutes import generate_jsroutes_content
37 from rhodecode.config.jsroutes import generate_jsroutes_content
38
38
39 import rhodecode
39 import rhodecode
40
40
41 from pylons.i18n.translation import _get_translator
41 from pylons.i18n.translation import _get_translator
42 from pylons.util import ContextObj
42 from pylons.util import ContextObj
43 from routes.util import URLGenerator
43 from routes.util import URLGenerator
44
44
45 from rhodecode.lib.base import attach_context_attributes, get_auth_user
45 from rhodecode.lib.base import attach_context_attributes, get_auth_user
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 def add_renderer_globals(event):
50 def add_renderer_globals(event):
51 from rhodecode.lib import helpers
52
53 # NOTE(marcink):
51 # Put pylons stuff into the context. This will be removed as soon as
54 # Put pylons stuff into the context. This will be removed as soon as
52 # migration to pyramid is finished.
55 # migration to pyramid is finished.
53 conf = pylons.config._current_obj()
54 event['h'] = conf.get('pylons.h')
55 event['c'] = pylons.tmpl_context
56 event['c'] = pylons.tmpl_context
56 event['url'] = pylons.url
57 event['url'] = pylons.url
57
58
58 # TODO: When executed in pyramid view context the request is not available
59 # TODO: When executed in pyramid view context the request is not available
59 # in the event. Find a better solution to get the request.
60 # in the event. Find a better solution to get the request.
60 request = event['request'] or get_current_request()
61 request = event['request'] or get_current_request()
61
62
62 # Add Pyramid translation as '_' to context
63 # Add Pyramid translation as '_' to context
63 event['_'] = request.translate
64 event['_'] = request.translate
64 event['_ungettext'] = request.plularize
65 event['_ungettext'] = request.plularize
66 event['h'] = helpers
65
67
66
68
67 def add_localizer(event):
69 def add_localizer(event):
68 request = event.request
70 request = event.request
69 localizer = get_localizer(request)
71 localizer = get_localizer(request)
70
72
71 def auto_translate(*args, **kwargs):
73 def auto_translate(*args, **kwargs):
72 return localizer.translate(tsf(*args, **kwargs))
74 return localizer.translate(tsf(*args, **kwargs))
73
75
74 request.localizer = localizer
76 request.localizer = localizer
75 request.translate = auto_translate
77 request.translate = auto_translate
76 request.plularize = localizer.pluralize
78 request.plularize = localizer.pluralize
77
79
78
80
79 def set_user_lang(event):
81 def set_user_lang(event):
80 request = event.request
82 request = event.request
81 cur_user = getattr(request, 'user', None)
83 cur_user = getattr(request, 'user', None)
82
84
83 if cur_user:
85 if cur_user:
84 user_lang = cur_user.get_instance().user_data.get('language')
86 user_lang = cur_user.get_instance().user_data.get('language')
85 if user_lang:
87 if user_lang:
86 log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang)
88 log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang)
87 event.request._LOCALE_ = user_lang
89 event.request._LOCALE_ = user_lang
88
90
89
91
90 def add_request_user_context(event):
92 def add_request_user_context(event):
91 """
93 """
92 Adds auth user into request context
94 Adds auth user into request context
93 """
95 """
94 request = event.request
96 request = event.request
95
97
96 if hasattr(request, 'vcs_call'):
98 if hasattr(request, 'vcs_call'):
97 # skip vcs calls
99 # skip vcs calls
98 return
100 return
99
101
100 if hasattr(request, 'rpc_method'):
102 if hasattr(request, 'rpc_method'):
101 # skip api calls
103 # skip api calls
102 return
104 return
103
105
104 auth_user = get_auth_user(request)
106 auth_user = get_auth_user(request)
105 request.user = auth_user
107 request.user = auth_user
106 request.environ['rc_auth_user'] = auth_user
108 request.environ['rc_auth_user'] = auth_user
107
109
108
110
109 def add_pylons_context(event):
111 def add_pylons_context(event):
110 request = event.request
112 request = event.request
111
113
112 config = rhodecode.CONFIG
114 config = rhodecode.CONFIG
113 environ = request.environ
115 environ = request.environ
114 session = request.session
116 session = request.session
115
117
116 if hasattr(request, 'vcs_call'):
118 if hasattr(request, 'vcs_call'):
117 # skip vcs calls
119 # skip vcs calls
118 return
120 return
119
121
120 # Setup pylons globals.
122 # Setup pylons globals.
121 pylons.config._push_object(config)
123 pylons.config._push_object(config)
122 pylons.request._push_object(request)
124 pylons.request._push_object(request)
123 pylons.session._push_object(session)
125 pylons.session._push_object(session)
124 pylons.translator._push_object(_get_translator(config.get('lang')))
126 pylons.translator._push_object(_get_translator(config.get('lang')))
125
127
126 pylons.url._push_object(URLGenerator(config['routes.map'], environ))
128 pylons.url._push_object(URLGenerator(config['routes.map'], environ))
127 session_key = (
129 session_key = (
128 config['pylons.environ_config'].get('session', 'beaker.session'))
130 config['pylons.environ_config'].get('session', 'beaker.session'))
129 environ[session_key] = session
131 environ[session_key] = session
130
132
131 if hasattr(request, 'rpc_method'):
133 if hasattr(request, 'rpc_method'):
132 # skip api calls
134 # skip api calls
133 return
135 return
134
136
135 # Setup the pylons context object ('c')
137 # Setup the pylons context object ('c')
136 context = ContextObj()
138 context = ContextObj()
137 context.rhodecode_user = request.user
139 context.rhodecode_user = request.user
138 attach_context_attributes(context, request, request.user.user_id)
140 attach_context_attributes(context, request, request.user.user_id)
139 pylons.tmpl_context._push_object(context)
141 pylons.tmpl_context._push_object(context)
140
142
141
143
142 def scan_repositories_if_enabled(event):
144 def scan_repositories_if_enabled(event):
143 """
145 """
144 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
146 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
145 does a repository scan if enabled in the settings.
147 does a repository scan if enabled in the settings.
146 """
148 """
147 settings = event.app.registry.settings
149 settings = event.app.registry.settings
148 vcs_server_enabled = settings['vcs.server.enable']
150 vcs_server_enabled = settings['vcs.server.enable']
149 import_on_startup = settings['startup.import_repos']
151 import_on_startup = settings['startup.import_repos']
150 if vcs_server_enabled and import_on_startup:
152 if vcs_server_enabled and import_on_startup:
151 from rhodecode.model.scm import ScmModel
153 from rhodecode.model.scm import ScmModel
152 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_base_path
154 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_base_path
153 repositories = ScmModel().repo_scan(get_rhodecode_base_path())
155 repositories = ScmModel().repo_scan(get_rhodecode_base_path())
154 repo2db_mapper(repositories, remove_obsolete=False)
156 repo2db_mapper(repositories, remove_obsolete=False)
155
157
156
158
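A minimal sketch of how such a subscriber might be registered on a Pyramid Configurator; the registration wiring is assumed here and not shown in this diff:

    from pyramid.events import ApplicationCreated
    config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)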
157 def write_metadata_if_needed(event):
159 def write_metadata_if_needed(event):
158 """
160 """
159 Writes upgrade metadata
161 Writes upgrade metadata
160 """
162 """
161 import rhodecode
163 import rhodecode
162 from rhodecode.lib import system_info
164 from rhodecode.lib import system_info
163 from rhodecode.lib import ext_json
165 from rhodecode.lib import ext_json
164
166
165 def write():
167 def write():
166 fname = '.rcmetadata.json'
168 fname = '.rcmetadata.json'
167 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
169 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
168 metadata_destination = os.path.join(ini_loc, fname)
170 metadata_destination = os.path.join(ini_loc, fname)
169
171
170 configuration = system_info.SysInfo(
172 configuration = system_info.SysInfo(
171 system_info.rhodecode_config)()['value']
173 system_info.rhodecode_config)()['value']
172 license_token = configuration['config']['license_token']
174 license_token = configuration['config']['license_token']
173 dbinfo = system_info.SysInfo(system_info.database_info)()['value']
175 dbinfo = system_info.SysInfo(system_info.database_info)()['value']
174 del dbinfo['url']
176 del dbinfo['url']
175 metadata = dict(
177 metadata = dict(
176 desc='upgrade metadata info',
178 desc='upgrade metadata info',
177 license_token=license_token,
179 license_token=license_token,
178 created_on=datetime.datetime.utcnow().isoformat(),
180 created_on=datetime.datetime.utcnow().isoformat(),
179 usage=system_info.SysInfo(system_info.usage_info)()['value'],
181 usage=system_info.SysInfo(system_info.usage_info)()['value'],
180 platform=system_info.SysInfo(system_info.platform_type)()['value'],
182 platform=system_info.SysInfo(system_info.platform_type)()['value'],
181 database=dbinfo,
183 database=dbinfo,
182 cpu=system_info.SysInfo(system_info.cpu)()['value'],
184 cpu=system_info.SysInfo(system_info.cpu)()['value'],
183 memory=system_info.SysInfo(system_info.memory)()['value'],
185 memory=system_info.SysInfo(system_info.memory)()['value'],
184 )
186 )
185
187
186 with open(metadata_destination, 'wb') as f:
188 with open(metadata_destination, 'wb') as f:
187 f.write(ext_json.json.dumps(metadata))
189 f.write(ext_json.json.dumps(metadata))
188
190
189 settings = event.app.registry.settings
191 settings = event.app.registry.settings
190 if settings.get('metadata.skip'):
192 if settings.get('metadata.skip'):
191 return
193 return
192
194
193 try:
195 try:
194 write()
196 write()
195 except Exception:
197 except Exception:
196 pass
198 pass
197
199
198
200
199 def write_js_routes_if_enabled(event):
201 def write_js_routes_if_enabled(event):
200 registry = event.app.registry
202 registry = event.app.registry
201
203
202 mapper = registry.queryUtility(IRoutesMapper)
204 mapper = registry.queryUtility(IRoutesMapper)
203 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
205 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
204
206
205 def _extract_route_information(route):
207 def _extract_route_information(route):
206 """
208 """
207 Convert a route into tuple(name, path, args), eg:
209 Convert a route into tuple(name, path, args), eg:
208 ('show_user', '/profile/%(username)s', ['username'])
210 ('show_user', '/profile/%(username)s', ['username'])
209 """
211 """
210
212
211 routepath = route.pattern
213 routepath = route.pattern
212 pattern = route.pattern
214 pattern = route.pattern
213
215
214 def replace(matchobj):
216 def replace(matchobj):
215 if matchobj.group(1):
217 if matchobj.group(1):
216 return "%%(%s)s" % matchobj.group(1).split(':')[0]
218 return "%%(%s)s" % matchobj.group(1).split(':')[0]
217 else:
219 else:
218 return "%%(%s)s" % matchobj.group(2)
220 return "%%(%s)s" % matchobj.group(2)
219
221
220 routepath = _argument_prog.sub(replace, routepath)
222 routepath = _argument_prog.sub(replace, routepath)
221
223
222 if not routepath.startswith('/'):
224 if not routepath.startswith('/'):
223 routepath = '/'+routepath
225 routepath = '/'+routepath
224
226
225 return (
227 return (
226 route.name,
228 route.name,
227 routepath,
229 routepath,
228 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
230 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
229 for arg in _argument_prog.findall(pattern)]
231 for arg in _argument_prog.findall(pattern)]
230 )
232 )
231
233
232 def get_routes():
234 def get_routes():
233 # pylons routes
235 # pylons routes
234 for route in rhodecode.CONFIG['routes.map'].jsroutes():
236 for route in rhodecode.CONFIG['routes.map'].jsroutes():
235 yield route
237 yield route
236
238
237 # pyramid routes
239 # pyramid routes
238 for route in mapper.get_routes():
240 for route in mapper.get_routes():
239 if not route.name.startswith('__'):
241 if not route.name.startswith('__'):
240 yield _extract_route_information(route)
242 yield _extract_route_information(route)
241
243
242 if asbool(registry.settings.get('generate_js_files', 'false')):
244 if asbool(registry.settings.get('generate_js_files', 'false')):
243 static_path = AssetResolver().resolve('rhodecode:public').abspath()
245 static_path = AssetResolver().resolve('rhodecode:public').abspath()
244 jsroutes = get_routes()
246 jsroutes = get_routes()
245 jsroutes_file_content = generate_jsroutes_content(jsroutes)
247 jsroutes_file_content = generate_jsroutes_content(jsroutes)
246 jsroutes_file_path = os.path.join(
248 jsroutes_file_path = os.path.join(
247 static_path, 'js', 'rhodecode', 'routes.js')
249 static_path, 'js', 'rhodecode', 'routes.js')
248
250
249 with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f:
251 with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f:
250 f.write(jsroutes_file_content)
252 f.write(jsroutes_file_content)
251
253
252
254
253 class Subscriber(object):
255 class Subscriber(object):
254 """
256 """
255 Base class for subscribers to the pyramid event system.
257 Base class for subscribers to the pyramid event system.
256 """
258 """
257 def __call__(self, event):
259 def __call__(self, event):
258 self.run(event)
260 self.run(event)
259
261
260 def run(self, event):
262 def run(self, event):
261 raise NotImplementedError('Subclass has to implement this.')
263 raise NotImplementedError('Subclass has to implement this.')
262
264
263
265
264 class AsyncSubscriber(Subscriber):
266 class AsyncSubscriber(Subscriber):
265 """
267 """
266 Subscriber that handles the execution of events in a separate task to not
268 Subscriber that handles the execution of events in a separate task to not
267 block the execution of the code which triggers the event. It puts the
269 block the execution of the code which triggers the event. It puts the
268 received events into a queue from which the worker thread takes them in
270 received events into a queue from which the worker thread takes them in
269 order.
271 order.
270 """
272 """
271 def __init__(self):
273 def __init__(self):
272 self._stop = False
274 self._stop = False
273 self._eventq = Queue.Queue()
275 self._eventq = Queue.Queue()
274 self._worker = self.create_worker()
276 self._worker = self.create_worker()
275 self._worker.start()
277 self._worker.start()
276
278
277 def __call__(self, event):
279 def __call__(self, event):
278 self._eventq.put(event)
280 self._eventq.put(event)
279
281
280 def create_worker(self):
282 def create_worker(self):
281 worker = Thread(target=self.do_work)
283 worker = Thread(target=self.do_work)
282 worker.daemon = True
284 worker.daemon = True
283 return worker
285 return worker
284
286
285 def stop_worker(self):
287 def stop_worker(self):
286 self._stop = True
288 self._stop = True
287 self._eventq.put(None)
289 self._eventq.put(None)
288 self._worker.join()
290 self._worker.join()
289
291
290 def do_work(self):
292 def do_work(self):
291 while not self._stop:
293 while not self._stop:
292 event = self._eventq.get()
294 event = self._eventq.get()
293 if event is not None:
295 if event is not None:
294 self.run(event)
296 self.run(event)
295
297
296
298
297 class AsyncSubprocessSubscriber(AsyncSubscriber):
299 class AsyncSubprocessSubscriber(AsyncSubscriber):
298 """
300 """
299 Subscriber that uses the subprocess32 module to execute a command if an
301 Subscriber that uses the subprocess32 module to execute a command if an
300 event is received. Events are handled asynchronously.
302 event is received. Events are handled asynchronously.
301 """
303 """
302
304
303 def __init__(self, cmd, timeout=None):
305 def __init__(self, cmd, timeout=None):
304 super(AsyncSubprocessSubscriber, self).__init__()
306 super(AsyncSubprocessSubscriber, self).__init__()
305 self._cmd = cmd
307 self._cmd = cmd
306 self._timeout = timeout
308 self._timeout = timeout
307
309
308 def run(self, event):
310 def run(self, event):
309 cmd = self._cmd
311 cmd = self._cmd
310 timeout = self._timeout
312 timeout = self._timeout
311 log.debug('Executing command %s.', cmd)
313 log.debug('Executing command %s.', cmd)
312
314
313 try:
315 try:
314 output = subprocess32.check_output(
316 output = subprocess32.check_output(
315 cmd, timeout=timeout, stderr=subprocess32.STDOUT)
317 cmd, timeout=timeout, stderr=subprocess32.STDOUT)
316 log.debug('Command finished %s', cmd)
318 log.debug('Command finished %s', cmd)
317 if output:
319 if output:
318 log.debug('Command output: %s', output)
320 log.debug('Command output: %s', output)
319 except subprocess32.TimeoutExpired as e:
321 except subprocess32.TimeoutExpired as e:
320 log.exception('Timeout while executing command.')
322 log.exception('Timeout while executing command.')
321 if e.output:
323 if e.output:
322 log.error('Command output: %s', e.output)
324 log.error('Command output: %s', e.output)
323 except subprocess32.CalledProcessError as e:
325 except subprocess32.CalledProcessError as e:
324 log.exception('Error while executing command.')
326 log.exception('Error while executing command.')
325 if e.output:
327 if e.output:
326 log.error('Command output: %s', e.output)
328 log.error('Command output: %s', e.output)
327 except:
329 except:
328 log.exception(
330 log.exception(
329 'Exception while executing command %s.', cmd)
331 'Exception while executing command %s.', cmd)
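In practice AsyncSubprocessSubscriber pairs an external command with an event type. A hedged wiring example — the event class and command are placeholders, not the actual hooks used by RhodeCode:

def includeme(config):
    # Rebuild static assets whenever the (hypothetical) event is notified,
    # without blocking the caller; abort the command after five minutes.
    rebuild = AsyncSubprocessSubscriber(cmd=['make', 'web-build'], timeout=300)
    config.add_subscriber(rebuild, AssetsChangedEvent)  # assumed event class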
@@ -1,987 +1,987 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.controllers.files import FilesController
26 from rhodecode.controllers.files import FilesController
27 from rhodecode.lib import helpers as h
27 from rhodecode.lib import helpers as h
28 from rhodecode.lib.compat import OrderedDict
28 from rhodecode.lib.compat import OrderedDict
29 from rhodecode.lib.ext_json import json
29 from rhodecode.lib.ext_json import json
30 from rhodecode.lib.vcs import nodes
30 from rhodecode.lib.vcs import nodes
31
31
32 from rhodecode.lib.vcs.conf import settings
32 from rhodecode.lib.vcs.conf import settings
33 from rhodecode.tests import (
33 from rhodecode.tests import (
34 url, assert_session_flash, assert_not_in_session_flash)
34 url, assert_session_flash, assert_not_in_session_flash)
35 from rhodecode.tests.fixture import Fixture
35 from rhodecode.tests.fixture import Fixture
36
36
37 fixture = Fixture()
37 fixture = Fixture()
38
38
39 NODE_HISTORY = {
39 NODE_HISTORY = {
40 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
40 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
41 'git': json.loads(fixture.load_resource('git_node_history_response.json')),
41 'git': json.loads(fixture.load_resource('git_node_history_response.json')),
42 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
42 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
43 }
43 }
44
44
45
45
46
46
47 @pytest.mark.usefixtures("app")
47 @pytest.mark.usefixtures("app")
48 class TestFilesController:
48 class TestFilesController:
49
49
50 def test_index(self, backend):
50 def test_index(self, backend):
51 response = self.app.get(url(
51 response = self.app.get(url(
52 controller='files', action='index',
52 controller='files', action='index',
53 repo_name=backend.repo_name, revision='tip', f_path='/'))
53 repo_name=backend.repo_name, revision='tip', f_path='/'))
54 commit = backend.repo.get_commit()
54 commit = backend.repo.get_commit()
55
55
56 params = {
56 params = {
57 'repo_name': backend.repo_name,
57 'repo_name': backend.repo_name,
58 'commit_id': commit.raw_id,
58 'commit_id': commit.raw_id,
59 'date': commit.date
59 'date': commit.date
60 }
60 }
61 assert_dirs_in_response(response, ['docs', 'vcs'], params)
61 assert_dirs_in_response(response, ['docs', 'vcs'], params)
62 files = [
62 files = [
63 '.gitignore',
63 '.gitignore',
64 '.hgignore',
64 '.hgignore',
65 '.hgtags',
65 '.hgtags',
66 # TODO: missing in Git
66 # TODO: missing in Git
67 # '.travis.yml',
67 # '.travis.yml',
68 'MANIFEST.in',
68 'MANIFEST.in',
69 'README.rst',
69 'README.rst',
70 # TODO: File is missing in svn repository
70 # TODO: File is missing in svn repository
71 # 'run_test_and_report.sh',
71 # 'run_test_and_report.sh',
72 'setup.cfg',
72 'setup.cfg',
73 'setup.py',
73 'setup.py',
74 'test_and_report.sh',
74 'test_and_report.sh',
75 'tox.ini',
75 'tox.ini',
76 ]
76 ]
77 assert_files_in_response(response, files, params)
77 assert_files_in_response(response, files, params)
78 assert_timeago_in_response(response, files, params)
78 assert_timeago_in_response(response, files, params)
79
79
80 def test_index_links_submodules_with_absolute_url(self, backend_hg):
80 def test_index_links_submodules_with_absolute_url(self, backend_hg):
81 repo = backend_hg['subrepos']
81 repo = backend_hg['subrepos']
82 response = self.app.get(url(
82 response = self.app.get(url(
83 controller='files', action='index',
83 controller='files', action='index',
84 repo_name=repo.repo_name, revision='tip', f_path='/'))
84 repo_name=repo.repo_name, revision='tip', f_path='/'))
85 assert_response = response.assert_response()
85 assert_response = response.assert_response()
86 assert_response.contains_one_link(
86 assert_response.contains_one_link(
87 'absolute-path @ 000000000000', 'http://example.com/absolute-path')
87 'absolute-path @ 000000000000', 'http://example.com/absolute-path')
88
88
89 def test_index_links_submodules_with_absolute_url_subpaths(
89 def test_index_links_submodules_with_absolute_url_subpaths(
90 self, backend_hg):
90 self, backend_hg):
91 repo = backend_hg['subrepos']
91 repo = backend_hg['subrepos']
92 response = self.app.get(url(
92 response = self.app.get(url(
93 controller='files', action='index',
93 controller='files', action='index',
94 repo_name=repo.repo_name, revision='tip', f_path='/'))
94 repo_name=repo.repo_name, revision='tip', f_path='/'))
95 assert_response = response.assert_response()
95 assert_response = response.assert_response()
96 assert_response.contains_one_link(
96 assert_response.contains_one_link(
97 'subpaths-path @ 000000000000',
97 'subpaths-path @ 000000000000',
98 'http://sub-base.example.com/subpaths-path')
98 'http://sub-base.example.com/subpaths-path')
99
99
100 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
100 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
101 def test_files_menu(self, backend):
101 def test_files_menu(self, backend):
102 new_branch = "temp_branch_name"
102 new_branch = "temp_branch_name"
103 commits = [
103 commits = [
104 {'message': 'a'},
104 {'message': 'a'},
105 {'message': 'b', 'branch': new_branch}
105 {'message': 'b', 'branch': new_branch}
106 ]
106 ]
107 backend.create_repo(commits)
107 backend.create_repo(commits)
108
108
109 backend.repo.landing_rev = "branch:%s" % new_branch
109 backend.repo.landing_rev = "branch:%s" % new_branch
110
110
111 # get response based on tip and not new revision
111 # get response based on tip and not new revision
112 response = self.app.get(url(
112 response = self.app.get(url(
113 controller='files', action='index',
113 controller='files', action='index',
114 repo_name=backend.repo_name, revision='tip', f_path='/'),
114 repo_name=backend.repo_name, revision='tip', f_path='/'),
115 status=200)
115 status=200)
116
116
117 # make sure Files menu url is not tip but new revision
117 # make sure Files menu url is not tip but new revision
118 landing_rev = backend.repo.landing_rev[1]
118 landing_rev = backend.repo.landing_rev[1]
119 files_url = url('files_home', repo_name=backend.repo_name,
119 files_url = url('files_home', repo_name=backend.repo_name,
120 revision=landing_rev)
120 revision=landing_rev)
121
121
122 assert landing_rev != 'tip'
122 assert landing_rev != 'tip'
123 response.mustcontain('<li class="active"><a class="menulink" href="%s">' % files_url)
123 response.mustcontain('<li class="active"><a class="menulink" href="%s">' % files_url)
124
124
125 def test_index_commit(self, backend):
125 def test_index_commit(self, backend):
126 commit = backend.repo.get_commit(commit_idx=32)
126 commit = backend.repo.get_commit(commit_idx=32)
127
127
128 response = self.app.get(url(
128 response = self.app.get(url(
129 controller='files', action='index',
129 controller='files', action='index',
130 repo_name=backend.repo_name,
130 repo_name=backend.repo_name,
131 revision=commit.raw_id,
131 revision=commit.raw_id,
132 f_path='/')
132 f_path='/')
133 )
133 )
134
134
135 dirs = ['docs', 'tests']
135 dirs = ['docs', 'tests']
136 files = ['README.rst']
136 files = ['README.rst']
137 params = {
137 params = {
138 'repo_name': backend.repo_name,
138 'repo_name': backend.repo_name,
139 'commit_id': commit.raw_id,
139 'commit_id': commit.raw_id,
140 }
140 }
141 assert_dirs_in_response(response, dirs, params)
141 assert_dirs_in_response(response, dirs, params)
142 assert_files_in_response(response, files, params)
142 assert_files_in_response(response, files, params)
143
143
144 def test_index_different_branch(self, backend):
144 def test_index_different_branch(self, backend):
145 branches = dict(
145 branches = dict(
146 hg=(150, ['git']),
146 hg=(150, ['git']),
147 # TODO: Git test repository does not contain other branches
147 # TODO: Git test repository does not contain other branches
148 git=(633, ['master']),
148 git=(633, ['master']),
149 # TODO: Branch support in Subversion
149 # TODO: Branch support in Subversion
150 svn=(150, [])
150 svn=(150, [])
151 )
151 )
152 idx, branches = branches[backend.alias]
152 idx, branches = branches[backend.alias]
153 commit = backend.repo.get_commit(commit_idx=idx)
153 commit = backend.repo.get_commit(commit_idx=idx)
154 response = self.app.get(url(
154 response = self.app.get(url(
155 controller='files', action='index',
155 controller='files', action='index',
156 repo_name=backend.repo_name,
156 repo_name=backend.repo_name,
157 revision=commit.raw_id,
157 revision=commit.raw_id,
158 f_path='/'))
158 f_path='/'))
159 assert_response = response.assert_response()
159 assert_response = response.assert_response()
160 for branch in branches:
160 for branch in branches:
161 assert_response.element_contains('.tags .branchtag', branch)
161 assert_response.element_contains('.tags .branchtag', branch)
162
162
163 def test_index_paging(self, backend):
163 def test_index_paging(self, backend):
164 repo = backend.repo
164 repo = backend.repo
165 indexes = [73, 92, 109, 1, 0]
165 indexes = [73, 92, 109, 1, 0]
166 idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
166 idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
167 for rev in indexes]
167 for rev in indexes]
168
168
169 for idx in idx_map:
169 for idx in idx_map:
170 response = self.app.get(url(
170 response = self.app.get(url(
171 controller='files', action='index',
171 controller='files', action='index',
172 repo_name=backend.repo_name,
172 repo_name=backend.repo_name,
173 revision=idx[1],
173 revision=idx[1],
174 f_path='/'))
174 f_path='/'))
175
175
176 response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))
176 response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))
177
177
178 def test_file_source(self, backend):
178 def test_file_source(self, backend):
179 commit = backend.repo.get_commit(commit_idx=167)
179 commit = backend.repo.get_commit(commit_idx=167)
180 response = self.app.get(url(
180 response = self.app.get(url(
181 controller='files', action='index',
181 controller='files', action='index',
182 repo_name=backend.repo_name,
182 repo_name=backend.repo_name,
183 revision=commit.raw_id,
183 revision=commit.raw_id,
184 f_path='vcs/nodes.py'))
184 f_path='vcs/nodes.py'))
185
185
186 msgbox = """<div class="commit right-content">%s</div>"""
186 msgbox = """<div class="commit right-content">%s</div>"""
187 response.mustcontain(msgbox % (commit.message, ))
187 response.mustcontain(msgbox % (commit.message, ))
188
188
189 assert_response = response.assert_response()
189 assert_response = response.assert_response()
190 if commit.branch:
190 if commit.branch:
191 assert_response.element_contains('.tags.tags-main .branchtag', commit.branch)
191 assert_response.element_contains('.tags.tags-main .branchtag', commit.branch)
192 if commit.tags:
192 if commit.tags:
193 for tag in commit.tags:
193 for tag in commit.tags:
194 assert_response.element_contains('.tags.tags-main .tagtag', tag)
194 assert_response.element_contains('.tags.tags-main .tagtag', tag)
195
195
196 def test_file_source_history(self, backend):
196 def test_file_source_history(self, backend):
197 response = self.app.get(
197 response = self.app.get(
198 url(
198 url(
199 controller='files', action='history',
199 controller='files', action='history',
200 repo_name=backend.repo_name,
200 repo_name=backend.repo_name,
201 revision='tip',
201 revision='tip',
202 f_path='vcs/nodes.py'),
202 f_path='vcs/nodes.py'),
203 extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
203 extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
204 assert NODE_HISTORY[backend.alias] == json.loads(response.body)
204 assert NODE_HISTORY[backend.alias] == json.loads(response.body)
205
205
206 def test_file_source_history_svn(self, backend_svn):
206 def test_file_source_history_svn(self, backend_svn):
207 simple_repo = backend_svn['svn-simple-layout']
207 simple_repo = backend_svn['svn-simple-layout']
208 response = self.app.get(
208 response = self.app.get(
209 url(
209 url(
210 controller='files', action='history',
210 controller='files', action='history',
211 repo_name=simple_repo.repo_name,
211 repo_name=simple_repo.repo_name,
212 revision='tip',
212 revision='tip',
213 f_path='trunk/example.py'),
213 f_path='trunk/example.py'),
214 extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
214 extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
215
215
216 expected_data = json.loads(
216 expected_data = json.loads(
217 fixture.load_resource('svn_node_history_branches.json'))
217 fixture.load_resource('svn_node_history_branches.json'))
218 assert expected_data == response.json
218 assert expected_data == response.json
219
219
220 def test_file_annotation_history(self, backend):
220 def test_file_annotation_history(self, backend):
221 response = self.app.get(
221 response = self.app.get(
222 url(
222 url(
223 controller='files', action='history',
223 controller='files', action='history',
224 repo_name=backend.repo_name,
224 repo_name=backend.repo_name,
225 revision='tip',
225 revision='tip',
226 f_path='vcs/nodes.py',
226 f_path='vcs/nodes.py',
227 annotate=True),
227 annotate=True),
228 extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
228 extra_environ={'HTTP_X_PARTIAL_XHR': '1'})
229 assert NODE_HISTORY[backend.alias] == json.loads(response.body)
229 assert NODE_HISTORY[backend.alias] == json.loads(response.body)
230
230
231 def test_file_annotation(self, backend):
231 def test_file_annotation(self, backend):
232 response = self.app.get(url(
232 response = self.app.get(url(
233 controller='files', action='index',
233 controller='files', action='index',
234 repo_name=backend.repo_name, revision='tip', f_path='vcs/nodes.py',
234 repo_name=backend.repo_name, revision='tip', f_path='vcs/nodes.py',
235 annotate=True))
235 annotate=True))
236
236
237 expected_revisions = {
237 expected_revisions = {
238 'hg': 'r356',
238 'hg': 'r356',
239 'git': 'r345',
239 'git': 'r345',
240 'svn': 'r208',
240 'svn': 'r208',
241 }
241 }
242 response.mustcontain(expected_revisions[backend.alias])
242 response.mustcontain(expected_revisions[backend.alias])
243
243
244 def test_file_authors(self, backend):
244 def test_file_authors(self, backend):
245 response = self.app.get(url(
245 response = self.app.get(url(
246 controller='files', action='authors',
246 controller='files', action='authors',
247 repo_name=backend.repo_name,
247 repo_name=backend.repo_name,
248 revision='tip',
248 revision='tip',
249 f_path='vcs/nodes.py',
249 f_path='vcs/nodes.py',
250 annotate=True))
250 annotate=True))
251
251
252 expected_authors = {
252 expected_authors = {
253 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
253 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
254 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
254 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
255 'svn': ('marcin', 'lukasz'),
255 'svn': ('marcin', 'lukasz'),
256 }
256 }
257
257
258 for author in expected_authors[backend.alias]:
258 for author in expected_authors[backend.alias]:
259 response.mustcontain(author)
259 response.mustcontain(author)
260
260
261 def test_tree_search_top_level(self, backend, xhr_header):
261 def test_tree_search_top_level(self, backend, xhr_header):
262 commit = backend.repo.get_commit(commit_idx=173)
262 commit = backend.repo.get_commit(commit_idx=173)
263 response = self.app.get(
263 response = self.app.get(
264 url('files_nodelist_home', repo_name=backend.repo_name,
264 url('files_nodelist_home', repo_name=backend.repo_name,
265 revision=commit.raw_id, f_path='/'),
265 revision=commit.raw_id, f_path='/'),
266 extra_environ=xhr_header)
266 extra_environ=xhr_header)
267 assert 'nodes' in response.json
267 assert 'nodes' in response.json
268 assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']
268 assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']
269
269
270 def test_tree_search_at_path(self, backend, xhr_header):
270 def test_tree_search_at_path(self, backend, xhr_header):
271 commit = backend.repo.get_commit(commit_idx=173)
271 commit = backend.repo.get_commit(commit_idx=173)
272 response = self.app.get(
272 response = self.app.get(
273 url('files_nodelist_home', repo_name=backend.repo_name,
273 url('files_nodelist_home', repo_name=backend.repo_name,
274 revision=commit.raw_id, f_path='/docs'),
274 revision=commit.raw_id, f_path='/docs'),
275 extra_environ=xhr_header)
275 extra_environ=xhr_header)
276 assert 'nodes' in response.json
276 assert 'nodes' in response.json
277 nodes = response.json['nodes']
277 nodes = response.json['nodes']
278 assert {'name': 'docs/api', 'type': 'dir'} in nodes
278 assert {'name': 'docs/api', 'type': 'dir'} in nodes
279 assert {'name': 'docs/index.rst', 'type': 'file'} in nodes
279 assert {'name': 'docs/index.rst', 'type': 'file'} in nodes
280
280
281 def test_tree_search_at_path_missing_xhr(self, backend):
281 def test_tree_search_at_path_missing_xhr(self, backend):
282 self.app.get(
282 self.app.get(
283 url('files_nodelist_home', repo_name=backend.repo_name,
283 url('files_nodelist_home', repo_name=backend.repo_name,
284 revision='tip', f_path=''), status=400)
284 revision='tip', f_path=''), status=400)
285
285
286 def test_tree_view_list(self, backend, xhr_header):
286 def test_tree_view_list(self, backend, xhr_header):
287 commit = backend.repo.get_commit(commit_idx=173)
287 commit = backend.repo.get_commit(commit_idx=173)
288 response = self.app.get(
288 response = self.app.get(
289 url('files_nodelist_home', repo_name=backend.repo_name,
289 url('files_nodelist_home', repo_name=backend.repo_name,
290 f_path='/', revision=commit.raw_id),
290 f_path='/', revision=commit.raw_id),
291 extra_environ=xhr_header,
291 extra_environ=xhr_header,
292 )
292 )
293 response.mustcontain("vcs/web/simplevcs/views/repository.py")
293 response.mustcontain("vcs/web/simplevcs/views/repository.py")
294
294
295 def test_tree_view_list_at_path(self, backend, xhr_header):
295 def test_tree_view_list_at_path(self, backend, xhr_header):
296 commit = backend.repo.get_commit(commit_idx=173)
296 commit = backend.repo.get_commit(commit_idx=173)
297 response = self.app.get(
297 response = self.app.get(
298 url('files_nodelist_home', repo_name=backend.repo_name,
298 url('files_nodelist_home', repo_name=backend.repo_name,
299 f_path='/docs', revision=commit.raw_id),
299 f_path='/docs', revision=commit.raw_id),
300 extra_environ=xhr_header,
300 extra_environ=xhr_header,
301 )
301 )
302 response.mustcontain("docs/index.rst")
302 response.mustcontain("docs/index.rst")
303
303
304 def test_tree_view_list_missing_xhr(self, backend):
304 def test_tree_view_list_missing_xhr(self, backend):
305 self.app.get(
305 self.app.get(
306 url('files_nodelist_home', repo_name=backend.repo_name,
306 url('files_nodelist_home', repo_name=backend.repo_name,
307 f_path='/', revision='tip'), status=400)
307 f_path='/', revision='tip'), status=400)
308
308
309 def test_nodetree_full_success(self, backend, xhr_header):
309 def test_nodetree_full_success(self, backend, xhr_header):
310 commit = backend.repo.get_commit(commit_idx=173)
310 commit = backend.repo.get_commit(commit_idx=173)
311 response = self.app.get(
311 response = self.app.get(
312 url('files_nodetree_full', repo_name=backend.repo_name,
312 url('files_nodetree_full', repo_name=backend.repo_name,
313 f_path='/', commit_id=commit.raw_id),
313 f_path='/', commit_id=commit.raw_id),
314 extra_environ=xhr_header)
314 extra_environ=xhr_header)
315
315
316 assert_response = response.assert_response()
316 assert_response = response.assert_response()
317
317
318 for attr in ['data-commit-id', 'data-date', 'data-author']:
318 for attr in ['data-commit-id', 'data-date', 'data-author']:
319 elements = assert_response.get_elements('[{}]'.format(attr))
319 elements = assert_response.get_elements('[{}]'.format(attr))
320 assert len(elements) > 1
320 assert len(elements) > 1
321
321
322 for element in elements:
322 for element in elements:
323 assert element.get(attr)
323 assert element.get(attr)
324
324
325 def test_nodetree_full_if_file(self, backend, xhr_header):
325 def test_nodetree_full_if_file(self, backend, xhr_header):
326 commit = backend.repo.get_commit(commit_idx=173)
326 commit = backend.repo.get_commit(commit_idx=173)
327 response = self.app.get(
327 response = self.app.get(
328 url('files_nodetree_full', repo_name=backend.repo_name,
328 url('files_nodetree_full', repo_name=backend.repo_name,
329 f_path='README.rst', commit_id=commit.raw_id),
329 f_path='README.rst', commit_id=commit.raw_id),
330 extra_environ=xhr_header)
330 extra_environ=xhr_header)
331 assert response.body == ''
331 assert response.body == ''
332
332
333 def test_tree_metadata_list_missing_xhr(self, backend):
333 def test_tree_metadata_list_missing_xhr(self, backend):
334 self.app.get(
334 self.app.get(
335 url('files_nodetree_full', repo_name=backend.repo_name,
335 url('files_nodetree_full', repo_name=backend.repo_name,
336 f_path='/', commit_id='tip'), status=400)
336 f_path='/', commit_id='tip'), status=400)
337
337
338 def test_access_empty_repo_redirect_to_summary_with_alert_write_perms(
338 def test_access_empty_repo_redirect_to_summary_with_alert_write_perms(
339 self, app, backend_stub, autologin_regular_user, user_regular,
339 self, app, backend_stub, autologin_regular_user, user_regular,
340 user_util):
340 user_util):
341 repo = backend_stub.create_repo()
341 repo = backend_stub.create_repo()
342 user_util.grant_user_permission_to_repo(
342 user_util.grant_user_permission_to_repo(
343 repo, user_regular, 'repository.write')
343 repo, user_regular, 'repository.write')
344 response = self.app.get(url(
344 response = self.app.get(url(
345 controller='files', action='index',
345 controller='files', action='index',
346 repo_name=repo.repo_name, revision='tip', f_path='/'))
346 repo_name=repo.repo_name, revision='tip', f_path='/'))
347 assert_session_flash(
347 assert_session_flash(
348 response,
348 response,
349 'There are no files yet. <a class="alert-link" '
349 'There are no files yet. <a class="alert-link" '
350 'href="/%s/add/0/#edit">Click here to add a new file.</a>'
350 'href="/%s/add/0/#edit">Click here to add a new file.</a>'
351 % (repo.repo_name))
351 % (repo.repo_name))
352
352
353 def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms(
353 def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms(
354 self, backend_stub, user_util):
354 self, backend_stub, user_util):
355 repo = backend_stub.create_repo()
355 repo = backend_stub.create_repo()
356 repo_file_url = url(
356 repo_file_url = url(
357 'files_add_home',
357 'files_add_home',
358 repo_name=repo.repo_name,
358 repo_name=repo.repo_name,
359 revision=0, f_path='', anchor='edit')
359 revision=0, f_path='', anchor='edit')
360 response = self.app.get(url(
360 response = self.app.get(url(
361 controller='files', action='index',
361 controller='files', action='index',
362 repo_name=repo.repo_name, revision='tip', f_path='/'))
362 repo_name=repo.repo_name, revision='tip', f_path='/'))
363 assert_not_in_session_flash(response, repo_file_url)
363 assert_not_in_session_flash(response, repo_file_url)
364
364
365
365
366 # TODO: johbo: Think about a better place for these tests. Either controller
366 # TODO: johbo: Think about a better place for these tests. Either controller
367 # specific unit tests or we move down the whole logic further towards the vcs
367 # specific unit tests or we move down the whole logic further towards the vcs
368 # layer
368 # layer
369 class TestAdjustFilePathForSvn(object):
369 class TestAdjustFilePathForSvn(object):
370 """SVN specific adjustments of node history in FileController."""
370 """SVN specific adjustments of node history in FileController."""
371
371
372 def test_returns_path_relative_to_matched_reference(self):
372 def test_returns_path_relative_to_matched_reference(self):
373 repo = self._repo(branches=['trunk'])
373 repo = self._repo(branches=['trunk'])
374 self.assert_file_adjustment('trunk/file', 'file', repo)
374 self.assert_file_adjustment('trunk/file', 'file', repo)
375
375
376 def test_does_not_modify_file_if_no_reference_matches(self):
376 def test_does_not_modify_file_if_no_reference_matches(self):
377 repo = self._repo(branches=['trunk'])
377 repo = self._repo(branches=['trunk'])
378 self.assert_file_adjustment('notes/file', 'notes/file', repo)
378 self.assert_file_adjustment('notes/file', 'notes/file', repo)
379
379
380 def test_does_not_adjust_partial_directory_names(self):
380 def test_does_not_adjust_partial_directory_names(self):
381 repo = self._repo(branches=['trun'])
381 repo = self._repo(branches=['trun'])
382 self.assert_file_adjustment('trunk/file', 'trunk/file', repo)
382 self.assert_file_adjustment('trunk/file', 'trunk/file', repo)
383
383
384 def test_is_robust_to_patterns_which_prefix_other_patterns(self):
384 def test_is_robust_to_patterns_which_prefix_other_patterns(self):
385 repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old'])
385 repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old'])
386 self.assert_file_adjustment('trunk/new/file', 'file', repo)
386 self.assert_file_adjustment('trunk/new/file', 'file', repo)
387
387
388 def assert_file_adjustment(self, f_path, expected, repo):
388 def assert_file_adjustment(self, f_path, expected, repo):
389 controller = FilesController()
389 controller = FilesController()
390 result = controller._adjust_file_path_for_svn(f_path, repo)
390 result = controller._adjust_file_path_for_svn(f_path, repo)
391 assert result == expected
391 assert result == expected
392
392
393 def _repo(self, branches=None):
393 def _repo(self, branches=None):
394 repo = mock.Mock()
394 repo = mock.Mock()
395 repo.branches = OrderedDict((name, '0') for name in branches or [])
395 repo.branches = OrderedDict((name, '0') for name in branches or [])
396 repo.tags = {}
396 repo.tags = {}
397 return repo
397 return repo
398
398
399
399
400 @pytest.mark.usefixtures("app")
400 @pytest.mark.usefixtures("app")
401 class TestRepositoryArchival(object):
401 class TestRepositoryArchival(object):
402
402
403 def test_archival(self, backend):
403 def test_archival(self, backend):
404 backend.enable_downloads()
404 backend.enable_downloads()
405 commit = backend.repo.get_commit(commit_idx=173)
405 commit = backend.repo.get_commit(commit_idx=173)
406 for archive, info in settings.ARCHIVE_SPECS.items():
406 for archive, info in settings.ARCHIVE_SPECS.items():
407 mime_type, arch_ext = info
407 mime_type, arch_ext = info
408 short = commit.short_id + arch_ext
408 short = commit.short_id + arch_ext
409 fname = commit.raw_id + arch_ext
409 fname = commit.raw_id + arch_ext
410 filename = '%s-%s' % (backend.repo_name, short)
410 filename = '%s-%s' % (backend.repo_name, short)
411 response = self.app.get(url(controller='files',
411 response = self.app.get(url(controller='files',
412 action='archivefile',
412 action='archivefile',
413 repo_name=backend.repo_name,
413 repo_name=backend.repo_name,
414 fname=fname))
414 fname=fname))
415
415
416 assert response.status == '200 OK'
416 assert response.status == '200 OK'
417 headers = {
417 headers = [
418 'Pragma': 'no-cache',
418 ('Pragma', 'no-cache'),
419 'Cache-Control': 'no-cache',
419 ('Cache-Control', 'no-cache'),
420 'Content-Disposition': 'attachment; filename=%s' % filename,
420 ('Content-Disposition', 'attachment; filename=%s' % filename),
421 'Content-Type': '%s; charset=utf-8' % mime_type,
421 ('Content-Type', '%s' % mime_type),
422 }
422 ]
423 if 'Set-Cookie' in response.response.headers:
423 if 'Set-Cookie' in response.response.headers:
424 del response.response.headers['Set-Cookie']
424 del response.response.headers['Set-Cookie']
425 assert response.response.headers == headers
425 assert response.response.headers.items() == headers
426
426
427 def test_archival_wrong_ext(self, backend):
427 def test_archival_wrong_ext(self, backend):
428 backend.enable_downloads()
428 backend.enable_downloads()
429 commit = backend.repo.get_commit(commit_idx=173)
429 commit = backend.repo.get_commit(commit_idx=173)
430 for arch_ext in ['tar', 'rar', 'x', '..ax', '.zipz']:
430 for arch_ext in ['tar', 'rar', 'x', '..ax', '.zipz']:
431 fname = commit.raw_id + arch_ext
431 fname = commit.raw_id + arch_ext
432
432
433 response = self.app.get(url(controller='files',
433 response = self.app.get(url(controller='files',
434 action='archivefile',
434 action='archivefile',
435 repo_name=backend.repo_name,
435 repo_name=backend.repo_name,
436 fname=fname))
436 fname=fname))
437 response.mustcontain('Unknown archive type')
437 response.mustcontain('Unknown archive type')
438
438
439 def test_archival_wrong_commit_id(self, backend):
439 def test_archival_wrong_commit_id(self, backend):
440 backend.enable_downloads()
440 backend.enable_downloads()
441 for commit_id in ['00x000000', 'tar', 'wrong', '@##$@$42413232',
441 for commit_id in ['00x000000', 'tar', 'wrong', '@##$@$42413232',
442 '232dffcd']:
442 '232dffcd']:
443 fname = '%s.zip' % commit_id
443 fname = '%s.zip' % commit_id
444
444
445 response = self.app.get(url(controller='files',
445 response = self.app.get(url(controller='files',
446 action='archivefile',
446 action='archivefile',
447 repo_name=backend.repo_name,
447 repo_name=backend.repo_name,
448 fname=fname))
448 fname=fname))
449 response.mustcontain('Unknown revision')
449 response.mustcontain('Unknown revision')
450
450
451
451
452 @pytest.mark.usefixtures("app", "autologin_user")
452 @pytest.mark.usefixtures("app", "autologin_user")
453 class TestRawFileHandling(object):
453 class TestRawFileHandling(object):
454
454
455 def test_raw_file_ok(self, backend):
455 def test_raw_file_ok(self, backend):
456 commit = backend.repo.get_commit(commit_idx=173)
456 commit = backend.repo.get_commit(commit_idx=173)
457 response = self.app.get(url(controller='files', action='rawfile',
457 response = self.app.get(url(controller='files', action='rawfile',
458 repo_name=backend.repo_name,
458 repo_name=backend.repo_name,
459 revision=commit.raw_id,
459 revision=commit.raw_id,
460 f_path='vcs/nodes.py'))
460 f_path='vcs/nodes.py'))
461
461
462 assert response.content_disposition == "attachment; filename=nodes.py"
462 assert response.content_disposition == "attachment; filename=nodes.py"
463 assert response.content_type == "text/x-python"
463 assert response.content_type == "text/x-python"
464
464
465 def test_raw_file_wrong_cs(self, backend):
465 def test_raw_file_wrong_cs(self, backend):
466 commit_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'
466 commit_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'
467 f_path = 'vcs/nodes.py'
467 f_path = 'vcs/nodes.py'
468
468
469 response = self.app.get(url(controller='files', action='rawfile',
469 response = self.app.get(url(controller='files', action='rawfile',
470 repo_name=backend.repo_name,
470 repo_name=backend.repo_name,
471 revision=commit_id,
471 revision=commit_id,
472 f_path=f_path), status=404)
472 f_path=f_path), status=404)
473
473
474 msg = """No such commit exists for this repository"""
474 msg = """No such commit exists for this repository"""
475 response.mustcontain(msg)
475 response.mustcontain(msg)
476
476
477 def test_raw_file_wrong_f_path(self, backend):
477 def test_raw_file_wrong_f_path(self, backend):
478 commit = backend.repo.get_commit(commit_idx=173)
478 commit = backend.repo.get_commit(commit_idx=173)
479 f_path = 'vcs/ERRORnodes.py'
479 f_path = 'vcs/ERRORnodes.py'
480 response = self.app.get(url(controller='files', action='rawfile',
480 response = self.app.get(url(controller='files', action='rawfile',
481 repo_name=backend.repo_name,
481 repo_name=backend.repo_name,
482 revision=commit.raw_id,
482 revision=commit.raw_id,
483 f_path=f_path), status=404)
483 f_path=f_path), status=404)
484
484
485 msg = (
485 msg = (
486 "There is no file nor directory at the given path: "
486 "There is no file nor directory at the given path: "
487 "`%s` at commit %s" % (f_path, commit.short_id))
487 "`%s` at commit %s" % (f_path, commit.short_id))
488 response.mustcontain(msg)
488 response.mustcontain(msg)
489
489
490 def test_raw_ok(self, backend):
490 def test_raw_ok(self, backend):
491 commit = backend.repo.get_commit(commit_idx=173)
491 commit = backend.repo.get_commit(commit_idx=173)
492 response = self.app.get(url(controller='files', action='raw',
492 response = self.app.get(url(controller='files', action='raw',
493 repo_name=backend.repo_name,
493 repo_name=backend.repo_name,
494 revision=commit.raw_id,
494 revision=commit.raw_id,
495 f_path='vcs/nodes.py'))
495 f_path='vcs/nodes.py'))
496
496
497 assert response.content_type == "text/plain"
497 assert response.content_type == "text/plain"
498
498
499 def test_raw_wrong_cs(self, backend):
499 def test_raw_wrong_cs(self, backend):
500 commit_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'
500 commit_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'
501 f_path = 'vcs/nodes.py'
501 f_path = 'vcs/nodes.py'
502
502
503 response = self.app.get(url(controller='files', action='raw',
503 response = self.app.get(url(controller='files', action='raw',
504 repo_name=backend.repo_name,
504 repo_name=backend.repo_name,
505 revision=commit_id,
505 revision=commit_id,
506 f_path=f_path), status=404)
506 f_path=f_path), status=404)
507
507
508 msg = """No such commit exists for this repository"""
508 msg = """No such commit exists for this repository"""
509 response.mustcontain(msg)
509 response.mustcontain(msg)
510
510
511 def test_raw_wrong_f_path(self, backend):
511 def test_raw_wrong_f_path(self, backend):
512 commit = backend.repo.get_commit(commit_idx=173)
512 commit = backend.repo.get_commit(commit_idx=173)
513 f_path = 'vcs/ERRORnodes.py'
513 f_path = 'vcs/ERRORnodes.py'
514 response = self.app.get(url(controller='files', action='raw',
514 response = self.app.get(url(controller='files', action='raw',
515 repo_name=backend.repo_name,
515 repo_name=backend.repo_name,
516 revision=commit.raw_id,
516 revision=commit.raw_id,
517 f_path=f_path), status=404)
517 f_path=f_path), status=404)
518 msg = (
518 msg = (
519 "There is no file nor directory at the given path: "
519 "There is no file nor directory at the given path: "
520 "`%s` at commit %s" % (f_path, commit.short_id))
520 "`%s` at commit %s" % (f_path, commit.short_id))
521 response.mustcontain(msg)
521 response.mustcontain(msg)
522
522
523 def test_raw_svg_should_not_be_rendered(self, backend):
523 def test_raw_svg_should_not_be_rendered(self, backend):
524 backend.create_repo()
524 backend.create_repo()
525 backend.ensure_file("xss.svg")
525 backend.ensure_file("xss.svg")
526 response = self.app.get(url(controller='files', action='raw',
526 response = self.app.get(url(controller='files', action='raw',
527 repo_name=backend.repo_name,
527 repo_name=backend.repo_name,
528 revision='tip',
528 revision='tip',
529 f_path='xss.svg'))
529 f_path='xss.svg'))
530
530
531 # If the content type is image/svg+xml then it allows to render HTML
531 # If the content type is image/svg+xml then it allows to render HTML
532 # and malicious SVG.
532 # and malicious SVG.
533 assert response.content_type == "text/plain"
533 assert response.content_type == "text/plain"
534
534
535
535
536 @pytest.mark.usefixtures("app")
536 @pytest.mark.usefixtures("app")
537 class TestFilesDiff:
537 class TestFilesDiff:
538
538
539 @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
539 @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
540 def test_file_full_diff(self, backend, diff):
540 def test_file_full_diff(self, backend, diff):
541 commit1 = backend.repo.get_commit(commit_idx=-1)
541 commit1 = backend.repo.get_commit(commit_idx=-1)
542 commit2 = backend.repo.get_commit(commit_idx=-2)
542 commit2 = backend.repo.get_commit(commit_idx=-2)
543
543
544 response = self.app.get(
544 response = self.app.get(
545 url(
545 url(
546 controller='files',
546 controller='files',
547 action='diff',
547 action='diff',
548 repo_name=backend.repo_name,
548 repo_name=backend.repo_name,
549 f_path='README'),
549 f_path='README'),
550 params={
550 params={
551 'diff1': commit2.raw_id,
551 'diff1': commit2.raw_id,
552 'diff2': commit1.raw_id,
552 'diff2': commit1.raw_id,
553 'fulldiff': '1',
553 'fulldiff': '1',
554 'diff': diff,
554 'diff': diff,
555 })
555 })
556
556
557 if diff == 'diff':
557 if diff == 'diff':
558 # use redirect since this is OLD view redirecting to compare page
558 # use redirect since this is OLD view redirecting to compare page
559 response = response.follow()
559 response = response.follow()
560
560
561 # It's a symlink to README.rst
561 # It's a symlink to README.rst
562 response.mustcontain('README.rst')
562 response.mustcontain('README.rst')
563 response.mustcontain('No newline at end of file')
563 response.mustcontain('No newline at end of file')
564
564
565 def test_file_binary_diff(self, backend):
565 def test_file_binary_diff(self, backend):
566 commits = [
566 commits = [
567 {'message': 'First commit'},
567 {'message': 'First commit'},
568 {'message': 'Commit with binary',
568 {'message': 'Commit with binary',
569 'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]},
569 'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]},
570 ]
570 ]
571 repo = backend.create_repo(commits=commits)
571 repo = backend.create_repo(commits=commits)
572
572
573 response = self.app.get(
573 response = self.app.get(
574 url(
574 url(
575 controller='files',
575 controller='files',
576 action='diff',
576 action='diff',
577 repo_name=backend.repo_name,
577 repo_name=backend.repo_name,
578 f_path='file.bin'),
578 f_path='file.bin'),
579 params={
579 params={
580 'diff1': repo.get_commit(commit_idx=0).raw_id,
580 'diff1': repo.get_commit(commit_idx=0).raw_id,
581 'diff2': repo.get_commit(commit_idx=1).raw_id,
581 'diff2': repo.get_commit(commit_idx=1).raw_id,
582 'fulldiff': '1',
582 'fulldiff': '1',
583 'diff': 'diff',
583 'diff': 'diff',
584 })
584 })
585 # use redirect since this is OLD view redirecting to compare page
585 # use redirect since this is OLD view redirecting to compare page
586 response = response.follow()
586 response = response.follow()
587 response.mustcontain('Expand 1 commit')
587 response.mustcontain('Expand 1 commit')
588 response.mustcontain('1 file changed: 0 inserted, 0 deleted')
588 response.mustcontain('1 file changed: 0 inserted, 0 deleted')
589
589
590 if backend.alias == 'svn':
590 if backend.alias == 'svn':
591 response.mustcontain('new file 10644')
591 response.mustcontain('new file 10644')
592 # TODO(marcink): SVN doesn't yet detect binary changes
592 # TODO(marcink): SVN doesn't yet detect binary changes
593 else:
593 else:
594 response.mustcontain('new file 100644')
594 response.mustcontain('new file 100644')
595 response.mustcontain('binary diff hidden')
595 response.mustcontain('binary diff hidden')
596
596
597 def test_diff_2way(self, backend):
597 def test_diff_2way(self, backend):
598 commit1 = backend.repo.get_commit(commit_idx=-1)
598 commit1 = backend.repo.get_commit(commit_idx=-1)
599 commit2 = backend.repo.get_commit(commit_idx=-2)
599 commit2 = backend.repo.get_commit(commit_idx=-2)
600 response = self.app.get(
600 response = self.app.get(
601 url(
601 url(
602 controller='files',
602 controller='files',
603 action='diff_2way',
603 action='diff_2way',
604 repo_name=backend.repo_name,
604 repo_name=backend.repo_name,
605 f_path='README'),
605 f_path='README'),
606 params={
606 params={
607 'diff1': commit2.raw_id,
607 'diff1': commit2.raw_id,
608 'diff2': commit1.raw_id,
608 'diff2': commit1.raw_id,
609 })
609 })
610 # use redirect since this is OLD view redirecting to compare page
610 # use redirect since this is OLD view redirecting to compare page
611 response = response.follow()
611 response = response.follow()
612
612
613 # It's a symlink to README.rst
613 # It's a symlink to README.rst
614 response.mustcontain('README.rst')
614 response.mustcontain('README.rst')
615 response.mustcontain('No newline at end of file')
615 response.mustcontain('No newline at end of file')
616
616
617 def test_requires_one_commit_id(self, backend, autologin_user):
617 def test_requires_one_commit_id(self, backend, autologin_user):
618 response = self.app.get(
618 response = self.app.get(
619 url(
619 url(
620 controller='files',
620 controller='files',
621 action='diff',
621 action='diff',
622 repo_name=backend.repo_name,
622 repo_name=backend.repo_name,
623 f_path='README.rst'),
623 f_path='README.rst'),
624 status=400)
624 status=400)
625 response.mustcontain(
625 response.mustcontain(
626 'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')
626 'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')
627
627
628 def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
628 def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
629 repo = vcsbackend.repo
629 repo = vcsbackend.repo
630 response = self.app.get(
630 response = self.app.get(
631 url(
631 url(
632 controller='files',
632 controller='files',
633 action='diff',
633 action='diff',
634 repo_name=repo.name,
634 repo_name=repo.name,
635 f_path='does-not-exist-in-any-commit',
635 f_path='does-not-exist-in-any-commit',
636 diff1=repo[0].raw_id,
636 diff1=repo[0].raw_id,
637 diff2=repo[1].raw_id),)
637 diff2=repo[1].raw_id),)
638
638
639 response = response.follow()
639 response = response.follow()
640 response.mustcontain('No files')
640 response.mustcontain('No files')
641
641
642 def test_returns_redirect_if_file_not_changed(self, backend):
642 def test_returns_redirect_if_file_not_changed(self, backend):
643 commit = backend.repo.get_commit(commit_idx=-1)
643 commit = backend.repo.get_commit(commit_idx=-1)
644 f_path = 'README'
644 f_path = 'README'
645 response = self.app.get(
645 response = self.app.get(
646 url(
646 url(
647 controller='files',
647 controller='files',
648 action='diff_2way',
648 action='diff_2way',
649 repo_name=backend.repo_name,
649 repo_name=backend.repo_name,
650 f_path=f_path,
650 f_path=f_path,
651 diff1=commit.raw_id,
651 diff1=commit.raw_id,
652 diff2=commit.raw_id,
652 diff2=commit.raw_id,
653 ),
653 ),
654 )
654 )
655 response = response.follow()
655 response = response.follow()
656 response.mustcontain('No files')
656 response.mustcontain('No files')
657 response.mustcontain('No commits in this compare')
657 response.mustcontain('No commits in this compare')
658
658
659 def test_supports_diff_to_different_path_svn(self, backend_svn):
659 def test_supports_diff_to_different_path_svn(self, backend_svn):
660 #TODO: check this case
660 #TODO: check this case
661 return
661 return
662
662
663 repo = backend_svn['svn-simple-layout'].scm_instance()
663 repo = backend_svn['svn-simple-layout'].scm_instance()
664 commit_id_1 = '24'
664 commit_id_1 = '24'
665 commit_id_2 = '26'
665 commit_id_2 = '26'
666
666
667
667
668 print( url(
668 print( url(
669 controller='files',
669 controller='files',
670 action='diff',
670 action='diff',
671 repo_name=repo.name,
671 repo_name=repo.name,
672 f_path='trunk/example.py',
672 f_path='trunk/example.py',
673 diff1='tags/v0.2/example.py@' + commit_id_1,
673 diff1='tags/v0.2/example.py@' + commit_id_1,
674 diff2=commit_id_2))
674 diff2=commit_id_2))
675
675
676 response = self.app.get(
676 response = self.app.get(
677 url(
677 url(
678 controller='files',
678 controller='files',
679 action='diff',
679 action='diff',
680 repo_name=repo.name,
680 repo_name=repo.name,
681 f_path='trunk/example.py',
681 f_path='trunk/example.py',
682 diff1='tags/v0.2/example.py@' + commit_id_1,
682 diff1='tags/v0.2/example.py@' + commit_id_1,
683 diff2=commit_id_2))
683 diff2=commit_id_2))
684
684
685 response = response.follow()
685 response = response.follow()
686 response.mustcontain(
686 response.mustcontain(
687 # diff contains this
687 # diff contains this
688 "Will print out a useful message on invocation.")
688 "Will print out a useful message on invocation.")
689
689
690 # Note: Expecting that we indicate the user what's being compared
690 # Note: Expecting that we indicate the user what's being compared
691 response.mustcontain("trunk/example.py")
691 response.mustcontain("trunk/example.py")
692 response.mustcontain("tags/v0.2/example.py")
692 response.mustcontain("tags/v0.2/example.py")
693
693
694 def test_show_rev_redirects_to_svn_path(self, backend_svn):
694 def test_show_rev_redirects_to_svn_path(self, backend_svn):
695 #TODO: check this case
695 #TODO: check this case
696 return
696 return
697
697
698 repo = backend_svn['svn-simple-layout'].scm_instance()
698 repo = backend_svn['svn-simple-layout'].scm_instance()
699 commit_id = repo[-1].raw_id
699 commit_id = repo[-1].raw_id
700 response = self.app.get(
700 response = self.app.get(
701 url(
701 url(
702 controller='files',
702 controller='files',
703 action='diff',
703 action='diff',
704 repo_name=repo.name,
704 repo_name=repo.name,
705 f_path='trunk/example.py',
705 f_path='trunk/example.py',
706 diff1='branches/argparse/example.py@' + commit_id,
706 diff1='branches/argparse/example.py@' + commit_id,
707 diff2=commit_id),
707 diff2=commit_id),
708 params={'show_rev': 'Show at Revision'},
708 params={'show_rev': 'Show at Revision'},
709 status=302)
709 status=302)
710 assert response.headers['Location'].endswith(
710 assert response.headers['Location'].endswith(
711 'svn-svn-simple-layout/files/26/branches/argparse/example.py')
711 'svn-svn-simple-layout/files/26/branches/argparse/example.py')
712
712
713 def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn):
713 def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn):
714 #TODO: check this case
714 #TODO: check this case
715 return
715 return
716
716
717 repo = backend_svn['svn-simple-layout'].scm_instance()
717 repo = backend_svn['svn-simple-layout'].scm_instance()
718 commit_id = repo[-1].raw_id
718 commit_id = repo[-1].raw_id
719 response = self.app.get(
719 response = self.app.get(
720 url(
720 url(
721 controller='files',
721 controller='files',
722 action='diff',
722 action='diff',
723 repo_name=repo.name,
723 repo_name=repo.name,
724 f_path='trunk/example.py',
724 f_path='trunk/example.py',
725 diff1='branches/argparse/example.py@' + commit_id,
725 diff1='branches/argparse/example.py@' + commit_id,
726 diff2=commit_id),
726 diff2=commit_id),
727 params={
727 params={
728 'show_rev': 'Show at Revision',
728 'show_rev': 'Show at Revision',
729 'annotate': 'true',
729 'annotate': 'true',
730 },
730 },
731 status=302)
731 status=302)
732 assert response.headers['Location'].endswith(
732 assert response.headers['Location'].endswith(
733 'svn-svn-simple-layout/annotate/26/branches/argparse/example.py')
733 'svn-svn-simple-layout/annotate/26/branches/argparse/example.py')
734
734
735
735
736 @pytest.mark.usefixtures("app", "autologin_user")
736 @pytest.mark.usefixtures("app", "autologin_user")
737 class TestChangingFiles:
737 class TestChangingFiles:
738
738
739 def test_add_file_view(self, backend):
739 def test_add_file_view(self, backend):
740 self.app.get(url(
740 self.app.get(url(
741 'files_add_home',
741 'files_add_home',
742 repo_name=backend.repo_name,
742 repo_name=backend.repo_name,
743 revision='tip', f_path='/'))
743 revision='tip', f_path='/'))
744
744
745 @pytest.mark.xfail_backends("svn", reason="Depends on online editing")
745 @pytest.mark.xfail_backends("svn", reason="Depends on online editing")
746 def test_add_file_into_repo_missing_content(self, backend, csrf_token):
746 def test_add_file_into_repo_missing_content(self, backend, csrf_token):
747 repo = backend.create_repo()
747 repo = backend.create_repo()
748 filename = 'init.py'
748 filename = 'init.py'
749 response = self.app.post(
749 response = self.app.post(
750 url(
750 url(
751 'files_add',
751 'files_add',
752 repo_name=repo.repo_name,
752 repo_name=repo.repo_name,
753 revision='tip', f_path='/'),
753 revision='tip', f_path='/'),
754 params={
754 params={
755 'content': "",
755 'content': "",
756 'filename': filename,
756 'filename': filename,
757 'location': "",
757 'location': "",
758 'csrf_token': csrf_token,
758 'csrf_token': csrf_token,
759 },
759 },
760 status=302)
760 status=302)
761 assert_session_flash(response,
761 assert_session_flash(response,
762 'Successfully committed new file `{}`'.format(os.path.join(filename)))
762 'Successfully committed new file `{}`'.format(os.path.join(filename)))
763
763
764 def test_add_file_into_repo_missing_filename(self, backend, csrf_token):
764 def test_add_file_into_repo_missing_filename(self, backend, csrf_token):
765 response = self.app.post(
765 response = self.app.post(
766 url(
766 url(
767 'files_add',
767 'files_add',
768 repo_name=backend.repo_name,
768 repo_name=backend.repo_name,
769 revision='tip', f_path='/'),
769 revision='tip', f_path='/'),
770 params={
770 params={
771 'content': "foo",
771 'content': "foo",
772 'csrf_token': csrf_token,
772 'csrf_token': csrf_token,
773 },
773 },
774 status=302)
774 status=302)
775
775
776 assert_session_flash(response, 'No filename')
776 assert_session_flash(response, 'No filename')
777
777
778 def test_add_file_into_repo_errors_and_no_commits(
778 def test_add_file_into_repo_errors_and_no_commits(
779 self, backend, csrf_token):
779 self, backend, csrf_token):
780 repo = backend.create_repo()
780 repo = backend.create_repo()
781 # Create a file with no filename, it will display an error but
781 # Create a file with no filename, it will display an error but
782 # the repo has no commits yet
782 # the repo has no commits yet
783 response = self.app.post(
783 response = self.app.post(
784 url(
784 url(
785 'files_add',
785 'files_add',
786 repo_name=repo.repo_name,
786 repo_name=repo.repo_name,
787 revision='tip', f_path='/'),
787 revision='tip', f_path='/'),
788 params={
788 params={
789 'content': "foo",
789 'content': "foo",
790 'csrf_token': csrf_token,
790 'csrf_token': csrf_token,
791 },
791 },
792 status=302)
792 status=302)
793
793
794 assert_session_flash(response, 'No filename')
794 assert_session_flash(response, 'No filename')
795
795
796 # Not allowed, redirect to the summary
796 # Not allowed, redirect to the summary
797 redirected = response.follow()
797 redirected = response.follow()
798 summary_url = h.route_path('repo_summary', repo_name=repo.repo_name)
798 summary_url = h.route_path('repo_summary', repo_name=repo.repo_name)
799
799
800 # As there are no commits, displays the summary page with the error of
800 # As there are no commits, displays the summary page with the error of
801 # creating a file with no filename
801 # creating a file with no filename
802
802
803 assert redirected.request.path == summary_url
803 assert redirected.request.path == summary_url
804
804
805 @pytest.mark.parametrize("location, filename", [
805 @pytest.mark.parametrize("location, filename", [
806 ('/abs', 'foo'),
806 ('/abs', 'foo'),
807 ('../rel', 'foo'),
807 ('../rel', 'foo'),
808 ('file/../foo', 'foo'),
808 ('file/../foo', 'foo'),
809 ])
809 ])
810 def test_add_file_into_repo_bad_filenames(
810 def test_add_file_into_repo_bad_filenames(
811 self, location, filename, backend, csrf_token):
811 self, location, filename, backend, csrf_token):
812 response = self.app.post(
812 response = self.app.post(
813 url(
813 url(
814 'files_add',
814 'files_add',
815 repo_name=backend.repo_name,
815 repo_name=backend.repo_name,
816 revision='tip', f_path='/'),
816 revision='tip', f_path='/'),
817 params={
817 params={
818 'content': "foo",
818 'content': "foo",
819 'filename': filename,
819 'filename': filename,
820 'location': location,
820 'location': location,
821 'csrf_token': csrf_token,
821 'csrf_token': csrf_token,
822 },
822 },
823 status=302)
823 status=302)
824
824
825 assert_session_flash(
825 assert_session_flash(
826 response,
826 response,
827 'The location specified must be a relative path and must not '
827 'The location specified must be a relative path and must not '
828 'contain .. in the path')
828 'contain .. in the path')
829
829
830 @pytest.mark.parametrize("cnt, location, filename", [
830 @pytest.mark.parametrize("cnt, location, filename", [
831 (1, '', 'foo.txt'),
831 (1, '', 'foo.txt'),
832 (2, 'dir', 'foo.rst'),
832 (2, 'dir', 'foo.rst'),
833 (3, 'rel/dir', 'foo.bar'),
833 (3, 'rel/dir', 'foo.bar'),
834 ])
834 ])
835 def test_add_file_into_repo(self, cnt, location, filename, backend,
835 def test_add_file_into_repo(self, cnt, location, filename, backend,
836 csrf_token):
836 csrf_token):
837 repo = backend.create_repo()
837 repo = backend.create_repo()
838 response = self.app.post(
838 response = self.app.post(
839 url(
839 url(
840 'files_add',
840 'files_add',
841 repo_name=repo.repo_name,
841 repo_name=repo.repo_name,
842 revision='tip', f_path='/'),
842 revision='tip', f_path='/'),
843 params={
843 params={
844 'content': "foo",
844 'content': "foo",
845 'filename': filename,
845 'filename': filename,
846 'location': location,
846 'location': location,
847 'csrf_token': csrf_token,
847 'csrf_token': csrf_token,
848 },
848 },
849 status=302)
849 status=302)
850 assert_session_flash(response,
850 assert_session_flash(response,
851 'Successfully committed new file `{}`'.format(
851 'Successfully committed new file `{}`'.format(
852 os.path.join(location, filename)))
852 os.path.join(location, filename)))
853
853
854 def test_edit_file_view(self, backend):
854 def test_edit_file_view(self, backend):
855 response = self.app.get(
855 response = self.app.get(
856 url(
856 url(
857 'files_edit_home',
857 'files_edit_home',
858 repo_name=backend.repo_name,
858 repo_name=backend.repo_name,
859 revision=backend.default_head_id,
859 revision=backend.default_head_id,
860 f_path='vcs/nodes.py'),
860 f_path='vcs/nodes.py'),
861 status=200)
861 status=200)
862 response.mustcontain("Module holding everything related to vcs nodes.")
862 response.mustcontain("Module holding everything related to vcs nodes.")
863
863
864 def test_edit_file_view_not_on_branch(self, backend):
864 def test_edit_file_view_not_on_branch(self, backend):
865 repo = backend.create_repo()
865 repo = backend.create_repo()
866 backend.ensure_file("vcs/nodes.py")
866 backend.ensure_file("vcs/nodes.py")
867
867
868 response = self.app.get(
868 response = self.app.get(
869 url(
869 url(
870 'files_edit_home',
870 'files_edit_home',
871 repo_name=repo.repo_name,
871 repo_name=repo.repo_name,
872 revision='tip', f_path='vcs/nodes.py'),
872 revision='tip', f_path='vcs/nodes.py'),
873 status=302)
873 status=302)
874 assert_session_flash(
874 assert_session_flash(
875 response,
875 response,
876 'You can only edit files with revision being a valid branch')
876 'You can only edit files with revision being a valid branch')
877
877
878 def test_edit_file_view_commit_changes(self, backend, csrf_token):
878 def test_edit_file_view_commit_changes(self, backend, csrf_token):
879 repo = backend.create_repo()
879 repo = backend.create_repo()
880 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
880 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
881
881
882 response = self.app.post(
882 response = self.app.post(
883 url(
883 url(
884 'files_edit',
884 'files_edit',
885 repo_name=repo.repo_name,
885 repo_name=repo.repo_name,
886 revision=backend.default_head_id,
886 revision=backend.default_head_id,
887 f_path='vcs/nodes.py'),
887 f_path='vcs/nodes.py'),
888 params={
888 params={
889 'content': "print 'hello world'",
889 'content': "print 'hello world'",
890 'message': 'I committed',
890 'message': 'I committed',
891 'filename': "vcs/nodes.py",
891 'filename': "vcs/nodes.py",
892 'csrf_token': csrf_token,
892 'csrf_token': csrf_token,
893 },
893 },
894 status=302)
894 status=302)
895 assert_session_flash(
895 assert_session_flash(
896 response, 'Successfully committed changes to file `vcs/nodes.py`')
896 response, 'Successfully committed changes to file `vcs/nodes.py`')
897 tip = repo.get_commit(commit_idx=-1)
897 tip = repo.get_commit(commit_idx=-1)
898 assert tip.message == 'I committed'
898 assert tip.message == 'I committed'
899
899
900 def test_edit_file_view_commit_changes_default_message(self, backend,
900 def test_edit_file_view_commit_changes_default_message(self, backend,
901 csrf_token):
901 csrf_token):
902 repo = backend.create_repo()
902 repo = backend.create_repo()
903 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
903 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
904
904
905 commit_id = (
905 commit_id = (
906 backend.default_branch_name or
906 backend.default_branch_name or
907 backend.repo.scm_instance().commit_ids[-1])
907 backend.repo.scm_instance().commit_ids[-1])
908
908
909 response = self.app.post(
909 response = self.app.post(
910 url(
910 url(
911 'files_edit',
911 'files_edit',
912 repo_name=repo.repo_name,
912 repo_name=repo.repo_name,
913 revision=commit_id,
913 revision=commit_id,
914 f_path='vcs/nodes.py'),
914 f_path='vcs/nodes.py'),
915 params={
915 params={
916 'content': "print 'hello world'",
916 'content': "print 'hello world'",
917 'message': '',
917 'message': '',
918 'filename': "vcs/nodes.py",
918 'filename': "vcs/nodes.py",
919 'csrf_token': csrf_token,
919 'csrf_token': csrf_token,
920 },
920 },
921 status=302)
921 status=302)
922 assert_session_flash(
922 assert_session_flash(
923 response, 'Successfully committed changes to file `vcs/nodes.py`')
923 response, 'Successfully committed changes to file `vcs/nodes.py`')
924 tip = repo.get_commit(commit_idx=-1)
924 tip = repo.get_commit(commit_idx=-1)
925 assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise'
925 assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise'
926
926
927 def test_delete_file_view(self, backend):
927 def test_delete_file_view(self, backend):
928 self.app.get(url(
928 self.app.get(url(
929 'files_delete_home',
929 'files_delete_home',
930 repo_name=backend.repo_name,
930 repo_name=backend.repo_name,
931 revision='tip', f_path='vcs/nodes.py'))
931 revision='tip', f_path='vcs/nodes.py'))
932
932
933 def test_delete_file_view_not_on_branch(self, backend):
933 def test_delete_file_view_not_on_branch(self, backend):
934 repo = backend.create_repo()
934 repo = backend.create_repo()
935 backend.ensure_file('vcs/nodes.py')
935 backend.ensure_file('vcs/nodes.py')
936
936
937 response = self.app.get(
937 response = self.app.get(
938 url(
938 url(
939 'files_delete_home',
939 'files_delete_home',
940 repo_name=repo.repo_name,
940 repo_name=repo.repo_name,
941 revision='tip', f_path='vcs/nodes.py'),
941 revision='tip', f_path='vcs/nodes.py'),
942 status=302)
942 status=302)
943 assert_session_flash(
943 assert_session_flash(
944 response,
944 response,
945 'You can only delete files with revision being a valid branch')
945 'You can only delete files with revision being a valid branch')
946
946
947 def test_delete_file_view_commit_changes(self, backend, csrf_token):
947 def test_delete_file_view_commit_changes(self, backend, csrf_token):
948 repo = backend.create_repo()
948 repo = backend.create_repo()
949 backend.ensure_file("vcs/nodes.py")
949 backend.ensure_file("vcs/nodes.py")
950
950
951 response = self.app.post(
951 response = self.app.post(
952 url(
952 url(
953 'files_delete_home',
953 'files_delete_home',
954 repo_name=repo.repo_name,
954 repo_name=repo.repo_name,
955 revision=backend.default_head_id,
955 revision=backend.default_head_id,
956 f_path='vcs/nodes.py'),
956 f_path='vcs/nodes.py'),
957 params={
957 params={
958 'message': 'i committed',
958 'message': 'i committed',
959 'csrf_token': csrf_token,
959 'csrf_token': csrf_token,
960 },
960 },
961 status=302)
961 status=302)
962 assert_session_flash(
962 assert_session_flash(
963 response, 'Successfully deleted file `vcs/nodes.py`')
963 response, 'Successfully deleted file `vcs/nodes.py`')
964
964
965
965
966 def assert_files_in_response(response, files, params):
966 def assert_files_in_response(response, files, params):
967 template = (
967 template = (
968 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
968 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
969 _assert_items_in_response(response, files, template, params)
969 _assert_items_in_response(response, files, template, params)
970
970
971
971
972 def assert_dirs_in_response(response, dirs, params):
972 def assert_dirs_in_response(response, dirs, params):
973 template = (
973 template = (
974 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
974 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
975 _assert_items_in_response(response, dirs, template, params)
975 _assert_items_in_response(response, dirs, template, params)
976
976
977
977
978 def _assert_items_in_response(response, items, template, params):
978 def _assert_items_in_response(response, items, template, params):
979 for item in items:
979 for item in items:
980 item_params = {'name': item}
980 item_params = {'name': item}
981 item_params.update(params)
981 item_params.update(params)
982 response.mustcontain(template % item_params)
982 response.mustcontain(template % item_params)
983
983
984
984
985 def assert_timeago_in_response(response, items, params):
985 def assert_timeago_in_response(response, items, params):
986 for item in items:
986 for item in items:
987 response.mustcontain(h.age_component(params['date']))
987 response.mustcontain(h.age_component(params['date']))
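The helper functions above expand an href template with each item's name plus the caller-supplied params; the sketch below shows how a test might drive them. It is illustrative only: the repo name, commit id and file list are made up, and params must supply every key used in the template ('repo_name' and 'commit_id').

    # Hypothetical usage from a functional test
    params = {'repo_name': 'vcs_test_hg', 'commit_id': 'deadbeef'}
    assert_files_in_response(response, ['setup.py', 'vcs/nodes.py'], params)
    assert_dirs_in_response(response, ['docs', 'vcs'], params)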
@@ -1,452 +1,455 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import base64
21 import base64
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.tests.utils import CustomTestApp
26 from rhodecode.tests.utils import CustomTestApp
27
27
28 from rhodecode.lib.caching_query import FromCache
28 from rhodecode.lib.caching_query import FromCache
29 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
29 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
30 from rhodecode.lib.middleware import simplevcs
30 from rhodecode.lib.middleware import simplevcs
31 from rhodecode.lib.middleware.https_fixup import HttpsFixup
31 from rhodecode.lib.middleware.https_fixup import HttpsFixup
32 from rhodecode.lib.middleware.utils import scm_app_http
32 from rhodecode.lib.middleware.utils import scm_app_http
33 from rhodecode.model.db import User, _hash_key
33 from rhodecode.model.db import User, _hash_key
34 from rhodecode.model.meta import Session
34 from rhodecode.model.meta import Session
35 from rhodecode.tests import (
35 from rhodecode.tests import (
36 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
36 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
37 from rhodecode.tests.lib.middleware import mock_scm_app
37 from rhodecode.tests.lib.middleware import mock_scm_app
38
38
39
39
40 class StubVCSController(simplevcs.SimpleVCS):
40 class StubVCSController(simplevcs.SimpleVCS):
41
41
42 SCM = 'hg'
42 SCM = 'hg'
43 stub_response_body = tuple()
43 stub_response_body = tuple()
44
44
45 def __init__(self, *args, **kwargs):
45 def __init__(self, *args, **kwargs):
46 super(StubVCSController, self).__init__(*args, **kwargs)
46 super(StubVCSController, self).__init__(*args, **kwargs)
47 self._action = 'pull'
47 self._action = 'pull'
48 self._name = HG_REPO
48 self._name = HG_REPO
49 self.set_repo_names(None)
49 self.set_repo_names(None)
50
50
51 def _get_repository_name(self, environ):
51 def _get_repository_name(self, environ):
52 return self._name
52 return self._name
53
53
54 def _get_action(self, environ):
54 def _get_action(self, environ):
55 return self._action
55 return self._action
56
56
57 def _create_wsgi_app(self, repo_path, repo_name, config):
57 def _create_wsgi_app(self, repo_path, repo_name, config):
58 def fake_app(environ, start_response):
58 def fake_app(environ, start_response):
59 start_response('200 OK', [])
59 headers = [
60 ('Http-Accept', 'application/mercurial')
61 ]
62 start_response('200 OK', headers)
60 return self.stub_response_body
63 return self.stub_response_body
61 return fake_app
64 return fake_app
62
65
63 def _create_config(self, extras, repo_name):
66 def _create_config(self, extras, repo_name):
64 return None
67 return None
65
68
66
69
67 @pytest.fixture
70 @pytest.fixture
68 def vcscontroller(pylonsapp, config_stub):
71 def vcscontroller(pylonsapp, config_stub):
69 config_stub.testing_securitypolicy()
72 config_stub.testing_securitypolicy()
70 config_stub.include('rhodecode.authentication')
73 config_stub.include('rhodecode.authentication')
71
74
72 #set_anonymous_access(True)
75 #set_anonymous_access(True)
73 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
76 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
74 app = HttpsFixup(controller, pylonsapp.config)
77 app = HttpsFixup(controller, pylonsapp.config)
75 app = CustomTestApp(app)
78 app = CustomTestApp(app)
76
79
77 _remove_default_user_from_query_cache()
80 _remove_default_user_from_query_cache()
78
81
79 # Sanity checks that things are set up correctly
82 # Sanity checks that things are set up correctly
80 app.get('/' + HG_REPO, status=200)
83 app.get('/' + HG_REPO, status=200)
81
84
82 app.controller = controller
85 app.controller = controller
83 return app
86 return app
84
87
85
88
86 def _remove_default_user_from_query_cache():
89 def _remove_default_user_from_query_cache():
87 user = User.get_default_user(cache=True)
90 user = User.get_default_user(cache=True)
88 query = Session().query(User).filter(User.username == user.username)
91 query = Session().query(User).filter(User.username == user.username)
89 query = query.options(
92 query = query.options(
90 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
93 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
91 query.invalidate()
94 query.invalidate()
92 Session().expire(user)
95 Session().expire(user)
93
96
94
97
95
96
97 def test_handles_exceptions_during_permissions_checks(
98 def test_handles_exceptions_during_permissions_checks(
98 vcscontroller, disable_anonymous_user):
99 vcscontroller, disable_anonymous_user):
99 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
100 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
100 auth_password = base64.encodestring(user_and_pass).strip()
101 auth_password = base64.encodestring(user_and_pass).strip()
101 extra_environ = {
102 extra_environ = {
102 'AUTH_TYPE': 'Basic',
103 'AUTH_TYPE': 'Basic',
103 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
104 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
104 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
105 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
105 }
106 }
106
107
107 # Verify that things are hooked up correctly
108 # Verify that things are hooked up correctly
108 vcscontroller.get('/', status=200, extra_environ=extra_environ)
109 vcscontroller.get('/', status=200, extra_environ=extra_environ)
109
110
110 # Simulate trouble during permission checks
111 # Simulate trouble during permission checks
111 with mock.patch('rhodecode.model.db.User.get_by_username',
112 with mock.patch('rhodecode.model.db.User.get_by_username',
112 side_effect=Exception) as get_user:
113 side_effect=Exception) as get_user:
113 # Verify that a correct 500 is returned and check that the expected
114 # Verify that a correct 500 is returned and check that the expected
114 # code path was hit.
115 # code path was hit.
115 vcscontroller.get('/', status=500, extra_environ=extra_environ)
116 vcscontroller.get('/', status=500, extra_environ=extra_environ)
116 assert get_user.called
117 assert get_user.called
117
118
118
119
119 def test_returns_forbidden_if_no_anonymous_access(
120 def test_returns_forbidden_if_no_anonymous_access(
120 vcscontroller, disable_anonymous_user):
121 vcscontroller, disable_anonymous_user):
121 vcscontroller.get('/', status=401)
122 vcscontroller.get('/', status=401)
122
123
123
124
124 class StubFailVCSController(simplevcs.SimpleVCS):
125 class StubFailVCSController(simplevcs.SimpleVCS):
125 def _handle_request(self, environ, start_response):
126 def _handle_request(self, environ, start_response):
126 raise Exception("BOOM")
127 raise Exception("BOOM")
127
128
128
129
129 @pytest.fixture(scope='module')
130 @pytest.fixture(scope='module')
130 def fail_controller(pylonsapp):
131 def fail_controller(pylonsapp):
131 controller = StubFailVCSController(pylonsapp, pylonsapp.config, None)
132 controller = StubFailVCSController(pylonsapp, pylonsapp.config, None)
132 controller = HttpsFixup(controller, pylonsapp.config)
133 controller = HttpsFixup(controller, pylonsapp.config)
133 controller = CustomTestApp(controller)
134 controller = CustomTestApp(controller)
134 return controller
135 return controller
135
136
136
137
137 def test_handles_exceptions_as_internal_server_error(fail_controller):
138 def test_handles_exceptions_as_internal_server_error(fail_controller):
138 fail_controller.get('/', status=500)
139 fail_controller.get('/', status=500)
139
140
140
141
141 def test_provides_traceback_for_appenlight(fail_controller):
142 def test_provides_traceback_for_appenlight(fail_controller):
142 response = fail_controller.get(
143 response = fail_controller.get(
143 '/', status=500, extra_environ={'appenlight.client': 'fake'})
144 '/', status=500, extra_environ={'appenlight.client': 'fake'})
144 assert 'appenlight.__traceback' in response.request.environ
145 assert 'appenlight.__traceback' in response.request.environ
145
146
146
147
147 def test_provides_utils_scm_app_as_scm_app_by_default(pylonsapp):
148 def test_provides_utils_scm_app_as_scm_app_by_default(pylonsapp):
148 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
149 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
149 assert controller.scm_app is scm_app_http
150 assert controller.scm_app is scm_app_http
150
151
151
152
152 def test_allows_to_override_scm_app_via_config(pylonsapp):
153 def test_allows_to_override_scm_app_via_config(pylonsapp):
153 config = pylonsapp.config.copy()
154 config = pylonsapp.config.copy()
154 config['vcs.scm_app_implementation'] = (
155 config['vcs.scm_app_implementation'] = (
155 'rhodecode.tests.lib.middleware.mock_scm_app')
156 'rhodecode.tests.lib.middleware.mock_scm_app')
156 controller = StubVCSController(pylonsapp, config, None)
157 controller = StubVCSController(pylonsapp, config, None)
157 assert controller.scm_app is mock_scm_app
158 assert controller.scm_app is mock_scm_app
158
159
159
160
160 @pytest.mark.parametrize('query_string, expected', [
161 @pytest.mark.parametrize('query_string, expected', [
161 ('cmd=stub_command', True),
162 ('cmd=stub_command', True),
162 ('cmd=listkeys', False),
163 ('cmd=listkeys', False),
163 ])
164 ])
164 def test_should_check_locking(query_string, expected):
165 def test_should_check_locking(query_string, expected):
165 result = simplevcs._should_check_locking(query_string)
166 result = simplevcs._should_check_locking(query_string)
166 assert result == expected
167 assert result == expected
167
168
168
169
169 class TestShadowRepoRegularExpression(object):
170 class TestShadowRepoRegularExpression(object):
170 pr_segment = 'pull-request'
171 pr_segment = 'pull-request'
171 shadow_segment = 'repository'
172 shadow_segment = 'repository'
172
173
173 @pytest.mark.parametrize('url, expected', [
174 @pytest.mark.parametrize('url, expected', [
174 # repo with/without groups
175 # repo with/without groups
175 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
176 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
176 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
177 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
177 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
178 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
178 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
179 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
179
180
180 # pull request ID
181 # pull request ID
181 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
182 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
182 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
183 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
183 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
184 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
184 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
185 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
185
186
186 # unicode
187 # unicode
187 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
188 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
188 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
189 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
189
190
190 # trailing/leading slash
191 # trailing/leading slash
191 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
192 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
192 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
193 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
193 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
194 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
194
195
195 # misc
196 # misc
196 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
197 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
197 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
198 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
198 ])
199 ])
199 def test_shadow_repo_regular_expression(self, url, expected):
200 def test_shadow_repo_regular_expression(self, url, expected):
200 from rhodecode.lib.middleware.simplevcs import SimpleVCS
201 from rhodecode.lib.middleware.simplevcs import SimpleVCS
201 url = url.format(
202 url = url.format(
202 pr_segment=self.pr_segment,
203 pr_segment=self.pr_segment,
203 shadow_segment=self.shadow_segment)
204 shadow_segment=self.shadow_segment)
204 match_obj = SimpleVCS.shadow_repo_re.match(url)
205 match_obj = SimpleVCS.shadow_repo_re.match(url)
205 assert (match_obj is not None) == expected
206 assert (match_obj is not None) == expected
206
207
207
208
208 @pytest.mark.backends('git', 'hg')
209 @pytest.mark.backends('git', 'hg')
209 class TestShadowRepoExposure(object):
210 class TestShadowRepoExposure(object):
210
211
211 def test_pull_on_shadow_repo_propagates_to_wsgi_app(self, pylonsapp):
212 def test_pull_on_shadow_repo_propagates_to_wsgi_app(self, pylonsapp):
212 """
213 """
213 Check that a pull action to a shadow repo is propagated to the
214 Check that a pull action to a shadow repo is propagated to the
214 underlying wsgi app.
215 underlying wsgi app.
215 """
216 """
216 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
217 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
217 controller._check_ssl = mock.Mock()
218 controller._check_ssl = mock.Mock()
218 controller.is_shadow_repo = True
219 controller.is_shadow_repo = True
219 controller._action = 'pull'
220 controller._action = 'pull'
220 controller.stub_response_body = 'dummy body value'
221 controller.stub_response_body = 'dummy body value'
221 environ_stub = {
222 environ_stub = {
222 'HTTP_HOST': 'test.example.com',
223 'HTTP_HOST': 'test.example.com',
224 'HTTP_ACCEPT': 'application/mercurial',
223 'REQUEST_METHOD': 'GET',
225 'REQUEST_METHOD': 'GET',
224 'wsgi.url_scheme': 'http',
226 'wsgi.url_scheme': 'http',
225 }
227 }
226
228
227 response = controller(environ_stub, mock.Mock())
229 response = controller(environ_stub, mock.Mock())
228 response_body = ''.join(response)
230 response_body = ''.join(response)
229
231
230 # Assert that we got the response from the wsgi app.
232 # Assert that we got the response from the wsgi app.
231 assert response_body == controller.stub_response_body
233 assert response_body == controller.stub_response_body
232
234
233 def test_push_on_shadow_repo_raises(self, pylonsapp):
235 def test_push_on_shadow_repo_raises(self, pylonsapp):
234 """
236 """
235 Check that a push action to a shadow repo is aborted.
237 Check that a push action to a shadow repo is aborted.
236 """
238 """
237 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
239 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
238 controller._check_ssl = mock.Mock()
240 controller._check_ssl = mock.Mock()
239 controller.is_shadow_repo = True
241 controller.is_shadow_repo = True
240 controller._action = 'push'
242 controller._action = 'push'
241 controller.stub_response_body = 'dummy body value'
243 controller.stub_response_body = 'dummy body value'
242 environ_stub = {
244 environ_stub = {
243 'HTTP_HOST': 'test.example.com',
245 'HTTP_HOST': 'test.example.com',
246 'HTTP_ACCEPT': 'application/mercurial',
244 'REQUEST_METHOD': 'GET',
247 'REQUEST_METHOD': 'GET',
245 'wsgi.url_scheme': 'http',
248 'wsgi.url_scheme': 'http',
246 }
249 }
247
250
248 response = controller(environ_stub, mock.Mock())
251 response = controller(environ_stub, mock.Mock())
249 response_body = ''.join(response)
252 response_body = ''.join(response)
250
253
251 assert response_body != controller.stub_response_body
254 assert response_body != controller.stub_response_body
252 # Assert that a 406 error is returned.
255 # Assert that a 406 error is returned.
253 assert '406 Not Acceptable' in response_body
256 assert '406 Not Acceptable' in response_body
254
257
255 def test_set_repo_names_no_shadow(self, pylonsapp):
258 def test_set_repo_names_no_shadow(self, pylonsapp):
256 """
259 """
257 Check that the set_repo_names method sets all names to the one returned
260 Check that the set_repo_names method sets all names to the one returned
258 by the _get_repository_name method on a request to a non shadow repo.
261 by the _get_repository_name method on a request to a non shadow repo.
259 """
262 """
260 environ_stub = {}
263 environ_stub = {}
261 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
264 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
262 controller._name = 'RepoGroup/MyRepo'
265 controller._name = 'RepoGroup/MyRepo'
263 controller.set_repo_names(environ_stub)
266 controller.set_repo_names(environ_stub)
264 assert not controller.is_shadow_repo
267 assert not controller.is_shadow_repo
265 assert (controller.url_repo_name ==
268 assert (controller.url_repo_name ==
266 controller.acl_repo_name ==
269 controller.acl_repo_name ==
267 controller.vcs_repo_name ==
270 controller.vcs_repo_name ==
268 controller._get_repository_name(environ_stub))
271 controller._get_repository_name(environ_stub))
269
272
270 def test_set_repo_names_with_shadow(self, pylonsapp, pr_util, config_stub):
273 def test_set_repo_names_with_shadow(self, pylonsapp, pr_util, config_stub):
271 """
274 """
272 Check that the set_repo_names method sets correct names on a request
275 Check that the set_repo_names method sets correct names on a request
273 to a shadow repo.
276 to a shadow repo.
274 """
277 """
275 from rhodecode.model.pull_request import PullRequestModel
278 from rhodecode.model.pull_request import PullRequestModel
276
279
277 pull_request = pr_util.create_pull_request()
280 pull_request = pr_util.create_pull_request()
278 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
281 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
279 target=pull_request.target_repo.repo_name,
282 target=pull_request.target_repo.repo_name,
280 pr_id=pull_request.pull_request_id,
283 pr_id=pull_request.pull_request_id,
281 pr_segment=TestShadowRepoRegularExpression.pr_segment,
284 pr_segment=TestShadowRepoRegularExpression.pr_segment,
282 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
285 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
283 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
286 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
284 controller._name = shadow_url
287 controller._name = shadow_url
285 controller.set_repo_names({})
288 controller.set_repo_names({})
286
289
287 # Get file system path to shadow repo for assertions.
290 # Get file system path to shadow repo for assertions.
288 workspace_id = PullRequestModel()._workspace_id(pull_request)
291 workspace_id = PullRequestModel()._workspace_id(pull_request)
289 target_vcs = pull_request.target_repo.scm_instance()
292 target_vcs = pull_request.target_repo.scm_instance()
290 vcs_repo_name = target_vcs._get_shadow_repository_path(
293 vcs_repo_name = target_vcs._get_shadow_repository_path(
291 workspace_id)
294 workspace_id)
292
295
293 assert controller.vcs_repo_name == vcs_repo_name
296 assert controller.vcs_repo_name == vcs_repo_name
294 assert controller.url_repo_name == shadow_url
297 assert controller.url_repo_name == shadow_url
295 assert controller.acl_repo_name == pull_request.target_repo.repo_name
298 assert controller.acl_repo_name == pull_request.target_repo.repo_name
296 assert controller.is_shadow_repo
299 assert controller.is_shadow_repo
297
300
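For orientation, with the pr_segment and shadow_segment values taken from TestShadowRepoRegularExpression, the shadow_url built above resolves to a path of the following shape (repository name and pull request id are made up):

    # e.g. 'RepoGroup/MyRepo/pull-request/4/repository'
    shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
        target='RepoGroup/MyRepo', pr_id=4,
        pr_segment='pull-request', shadow_segment='repository')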
298 def test_set_repo_names_with_shadow_but_missing_pr(
301 def test_set_repo_names_with_shadow_but_missing_pr(
299 self, pylonsapp, pr_util, config_stub):
302 self, pylonsapp, pr_util, config_stub):
300 """
303 """
301 Checks that the set_repo_names method enforces matching target repos
304 Checks that the set_repo_names method enforces matching target repos
302 and pull request IDs.
305 and pull request IDs.
303 """
306 """
304 pull_request = pr_util.create_pull_request()
307 pull_request = pr_util.create_pull_request()
305 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
308 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
306 target=pull_request.target_repo.repo_name,
309 target=pull_request.target_repo.repo_name,
307 pr_id=999999999,
310 pr_id=999999999,
308 pr_segment=TestShadowRepoRegularExpression.pr_segment,
311 pr_segment=TestShadowRepoRegularExpression.pr_segment,
309 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
312 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
310 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
313 controller = StubVCSController(pylonsapp, pylonsapp.config, None)
311 controller._name = shadow_url
314 controller._name = shadow_url
312 controller.set_repo_names({})
315 controller.set_repo_names({})
313
316
314 assert not controller.is_shadow_repo
317 assert not controller.is_shadow_repo
315 assert (controller.url_repo_name ==
318 assert (controller.url_repo_name ==
316 controller.acl_repo_name ==
319 controller.acl_repo_name ==
317 controller.vcs_repo_name)
320 controller.vcs_repo_name)
318
321
319
322
320 @pytest.mark.usefixtures('db')
323 @pytest.mark.usefixtures('db')
321 class TestGenerateVcsResponse(object):
324 class TestGenerateVcsResponse(object):
322
325
323 def test_ensures_that_start_response_is_called_early_enough(self):
326 def test_ensures_that_start_response_is_called_early_enough(self):
324 self.call_controller_with_response_body(iter(['a', 'b']))
327 self.call_controller_with_response_body(iter(['a', 'b']))
325 assert self.start_response.called
328 assert self.start_response.called
326
329
327 def test_invalidates_cache_after_body_is_consumed(self):
330 def test_invalidates_cache_after_body_is_consumed(self):
328 result = self.call_controller_with_response_body(iter(['a', 'b']))
331 result = self.call_controller_with_response_body(iter(['a', 'b']))
329 assert not self.was_cache_invalidated()
332 assert not self.was_cache_invalidated()
330 # Consume the result
333 # Consume the result
331 list(result)
334 list(result)
332 assert self.was_cache_invalidated()
335 assert self.was_cache_invalidated()
333
336
334 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
337 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
335 def test_handles_locking_exception(self, http_locked_rc):
338 def test_handles_locking_exception(self, http_locked_rc):
336 result = self.call_controller_with_response_body(
339 result = self.call_controller_with_response_body(
337 self.raise_result_iter(vcs_kind='repo_locked'))
340 self.raise_result_iter(vcs_kind='repo_locked'))
338 assert not http_locked_rc.called
341 assert not http_locked_rc.called
339 # Consume the result
342 # Consume the result
340 list(result)
343 list(result)
341 assert http_locked_rc.called
344 assert http_locked_rc.called
342
345
343 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPRequirementError')
346 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPRequirementError')
344 def test_handles_requirement_exception(self, http_requirement):
347 def test_handles_requirement_exception(self, http_requirement):
345 result = self.call_controller_with_response_body(
348 result = self.call_controller_with_response_body(
346 self.raise_result_iter(vcs_kind='requirement'))
349 self.raise_result_iter(vcs_kind='requirement'))
347 assert not http_requirement.called
350 assert not http_requirement.called
348 # Consume the result
351 # Consume the result
349 list(result)
352 list(result)
350 assert http_requirement.called
353 assert http_requirement.called
351
354
352 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
355 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
353 def test_handles_locking_exception_in_app_call(self, http_locked_rc):
356 def test_handles_locking_exception_in_app_call(self, http_locked_rc):
354 app_factory_patcher = mock.patch.object(
357 app_factory_patcher = mock.patch.object(
355 StubVCSController, '_create_wsgi_app')
358 StubVCSController, '_create_wsgi_app')
356 with app_factory_patcher as app_factory:
359 with app_factory_patcher as app_factory:
357 app_factory().side_effect = self.vcs_exception()
360 app_factory().side_effect = self.vcs_exception()
358 result = self.call_controller_with_response_body(['a'])
361 result = self.call_controller_with_response_body(['a'])
359 list(result)
362 list(result)
360 assert http_locked_rc.called
363 assert http_locked_rc.called
361
364
362 def test_raises_unknown_exceptions(self):
365 def test_raises_unknown_exceptions(self):
363 result = self.call_controller_with_response_body(
366 result = self.call_controller_with_response_body(
364 self.raise_result_iter(vcs_kind='unknown'))
367 self.raise_result_iter(vcs_kind='unknown'))
365 with pytest.raises(Exception):
368 with pytest.raises(Exception):
366 list(result)
369 list(result)
367
370
368 def test_prepare_callback_daemon_is_called(self):
371 def test_prepare_callback_daemon_is_called(self):
369 def side_effect(extras):
372 def side_effect(extras):
370 return DummyHooksCallbackDaemon(), extras
373 return DummyHooksCallbackDaemon(), extras
371
374
372 prepare_patcher = mock.patch.object(
375 prepare_patcher = mock.patch.object(
373 StubVCSController, '_prepare_callback_daemon')
376 StubVCSController, '_prepare_callback_daemon')
374 with prepare_patcher as prepare_mock:
377 with prepare_patcher as prepare_mock:
375 prepare_mock.side_effect = side_effect
378 prepare_mock.side_effect = side_effect
376 self.call_controller_with_response_body(iter(['a', 'b']))
379 self.call_controller_with_response_body(iter(['a', 'b']))
377 assert prepare_mock.called
380 assert prepare_mock.called
378 assert prepare_mock.call_count == 1
381 assert prepare_mock.call_count == 1
379
382
380 def call_controller_with_response_body(self, response_body):
383 def call_controller_with_response_body(self, response_body):
381 settings = {
384 settings = {
382 'base_path': 'fake_base_path',
385 'base_path': 'fake_base_path',
383 'vcs.hooks.protocol': 'http',
386 'vcs.hooks.protocol': 'http',
384 'vcs.hooks.direct_calls': False,
387 'vcs.hooks.direct_calls': False,
385 }
388 }
386 controller = StubVCSController(None, settings, None)
389 controller = StubVCSController(None, settings, None)
387 controller._invalidate_cache = mock.Mock()
390 controller._invalidate_cache = mock.Mock()
388 controller.stub_response_body = response_body
391 controller.stub_response_body = response_body
389 self.start_response = mock.Mock()
392 self.start_response = mock.Mock()
390 result = controller._generate_vcs_response(
393 result = controller._generate_vcs_response(
391 environ={}, start_response=self.start_response,
394 environ={}, start_response=self.start_response,
392 repo_path='fake_repo_path',
395 repo_path='fake_repo_path',
393 extras={}, action='push')
396 extras={}, action='push')
394 self.controller = controller
397 self.controller = controller
395 return result
398 return result
396
399
397 def raise_result_iter(self, vcs_kind='repo_locked'):
400 def raise_result_iter(self, vcs_kind='repo_locked'):
398 """
401 """
399 Simulates a result iterator that raises a vcs exception of kind vcs_kind
402 Simulates a result iterator that raises a vcs exception of kind vcs_kind
400 """
403 """
401 raise self.vcs_exception(vcs_kind=vcs_kind)
404 raise self.vcs_exception(vcs_kind=vcs_kind)
402 yield "never_reached"
405 yield "never_reached"
403
406
404 def vcs_exception(self, vcs_kind='repo_locked'):
407 def vcs_exception(self, vcs_kind='repo_locked'):
405 locked_exception = Exception('TEST_MESSAGE')
408 locked_exception = Exception('TEST_MESSAGE')
406 locked_exception._vcs_kind = vcs_kind
409 locked_exception._vcs_kind = vcs_kind
407 return locked_exception
410 return locked_exception
408
411
409 def was_cache_invalidated(self):
412 def was_cache_invalidated(self):
410 return self.controller._invalidate_cache.called
413 return self.controller._invalidate_cache.called
411
414
412
415
413 class TestInitializeGenerator(object):
416 class TestInitializeGenerator(object):
414
417
415 def test_drains_first_element(self):
418 def test_drains_first_element(self):
416 gen = self.factory(['__init__', 1, 2])
419 gen = self.factory(['__init__', 1, 2])
417 result = list(gen)
420 result = list(gen)
418 assert result == [1, 2]
421 assert result == [1, 2]
419
422
420 @pytest.mark.parametrize('values', [
423 @pytest.mark.parametrize('values', [
421 [],
424 [],
422 [1, 2],
425 [1, 2],
423 ])
426 ])
424 def test_raises_value_error(self, values):
427 def test_raises_value_error(self, values):
425 with pytest.raises(ValueError):
428 with pytest.raises(ValueError):
426 self.factory(values)
429 self.factory(values)
427
430
428 @simplevcs.initialize_generator
431 @simplevcs.initialize_generator
429 def factory(self, iterable):
432 def factory(self, iterable):
430 for elem in iterable:
433 for elem in iterable:
431 yield elem
434 yield elem
432
435
433
436
434 class TestPrepareHooksDaemon(object):
437 class TestPrepareHooksDaemon(object):
435 def test_calls_imported_prepare_callback_daemon(self, app_settings):
438 def test_calls_imported_prepare_callback_daemon(self, app_settings):
436 expected_extras = {'extra1': 'value1'}
439 expected_extras = {'extra1': 'value1'}
437 daemon = DummyHooksCallbackDaemon()
440 daemon = DummyHooksCallbackDaemon()
438
441
439 controller = StubVCSController(None, app_settings, None)
442 controller = StubVCSController(None, app_settings, None)
440 prepare_patcher = mock.patch.object(
443 prepare_patcher = mock.patch.object(
441 simplevcs, 'prepare_callback_daemon',
444 simplevcs, 'prepare_callback_daemon',
442 return_value=(daemon, expected_extras))
445 return_value=(daemon, expected_extras))
443 with prepare_patcher as prepare_mock:
446 with prepare_patcher as prepare_mock:
444 callback_daemon, extras = controller._prepare_callback_daemon(
447 callback_daemon, extras = controller._prepare_callback_daemon(
445 expected_extras.copy())
448 expected_extras.copy())
446 prepare_mock.assert_called_once_with(
449 prepare_mock.assert_called_once_with(
447 expected_extras,
450 expected_extras,
448 protocol=app_settings['vcs.hooks.protocol'],
451 protocol=app_settings['vcs.hooks.protocol'],
449 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
452 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
450
453
451 assert callback_daemon == daemon
454 assert callback_daemon == daemon
452 assert extras == expected_extras
455 assert extras == expected_extras
@@ -1,423 +1,424 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import threading
21 import threading
22 import time
22 import time
23 import logging
23 import logging
24 import os.path
24 import os.path
25 import subprocess32
25 import subprocess32
26 import tempfile
26 import tempfile
27 import urllib2
27 import urllib2
28 from lxml.html import fromstring, tostring
28 from lxml.html import fromstring, tostring
29 from lxml.cssselect import CSSSelector
29 from lxml.cssselect import CSSSelector
30 from urlparse import urlparse, parse_qsl
30 from urlparse import urlparse, parse_qsl
31 from urllib import unquote_plus
31 from urllib import unquote_plus
32 import webob
32
33
33 from webtest.app import (
34 from webtest.app import TestResponse, TestApp, string_types
34 Request, TestResponse, TestApp, print_stderr, string_types)
35 from webtest.compat import print_stderr
35
36
36 import pytest
37 import pytest
37 import rc_testdata
38 import rc_testdata
38
39
39 from rhodecode.model.db import User, Repository
40 from rhodecode.model.db import User, Repository
40 from rhodecode.model.meta import Session
41 from rhodecode.model.meta import Session
41 from rhodecode.model.scm import ScmModel
42 from rhodecode.model.scm import ScmModel
42 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
43 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
43 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
44
45
45
46
46 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
47
48
48
49
49 class CustomTestResponse(TestResponse):
50 class CustomTestResponse(TestResponse):
50 def _save_output(self, out):
51 def _save_output(self, out):
51 f = tempfile.NamedTemporaryFile(
52 f = tempfile.NamedTemporaryFile(
52 delete=False, prefix='rc-test-', suffix='.html')
53 delete=False, prefix='rc-test-', suffix='.html')
53 f.write(out)
54 f.write(out)
54 return f.name
55 return f.name
55
56
56 def mustcontain(self, *strings, **kw):
57 def mustcontain(self, *strings, **kw):
57 """
58 """
58 Assert that the response contains all of the strings passed
59 Assert that the response contains all of the strings passed
59 in as arguments.
60 in as arguments.
60
61
61 Equivalent to::
62 Equivalent to::
62
63
63 assert string in res
64 assert string in res
64 """
65 """
65 if 'no' in kw:
66 if 'no' in kw:
66 no = kw['no']
67 no = kw['no']
67 del kw['no']
68 del kw['no']
68 if isinstance(no, string_types):
69 if isinstance(no, string_types):
69 no = [no]
70 no = [no]
70 else:
71 else:
71 no = []
72 no = []
72 if kw:
73 if kw:
73 raise TypeError(
74 raise TypeError(
74 "The only keyword argument allowed is 'no'")
75 "The only keyword argument allowed is 'no'")
75
76
76 f = self._save_output(str(self))
77 f = self._save_output(str(self))
77
78
78 for s in strings:
79 for s in strings:
79 if s not in self:
80 if s not in self:
80 print_stderr("Actual response (no %r):" % s)
81 print_stderr("Actual response (no %r):" % s)
81 print_stderr(str(self))
82 print_stderr(str(self))
82 raise IndexError(
83 raise IndexError(
83 "Body does not contain string %r, output saved as %s" % (
84 "Body does not contain string %r, output saved as %s" % (
84 s, f))
85 s, f))
85
86
86 for no_s in no:
87 for no_s in no:
87 if no_s in self:
88 if no_s in self:
88 print_stderr("Actual response (has %r)" % no_s)
89 print_stderr("Actual response (has %r)" % no_s)
89 print_stderr(str(self))
90 print_stderr(str(self))
90 raise IndexError(
91 raise IndexError(
91 "Body contains bad string %r, output saved as %s" % (
92 "Body contains bad string %r, output saved as %s" % (
92 no_s, f))
93 no_s, f))
93
94
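A usage sketch of the mustcontain override above, assuming a response obtained from one of the functional tests; the strings are illustrative:

    # Passes only if both strings are present and 'Page not found' is absent;
    # on failure the rendered body is written to a temporary .html file.
    response.mustcontain('vcs/nodes.py', 'commit message', no='Page not found')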
94 def assert_response(self):
95 def assert_response(self):
95 return AssertResponse(self)
96 return AssertResponse(self)
96
97
97 def get_session_from_response(self):
98 def get_session_from_response(self):
98 """
99 """
99 This returns the session from a response object. Pylons has some magic
100 This returns the session from a response object. Pylons has some magic
100 to make the session available as `response.session`. But pyramid
101 to make the session available as `response.session`. But pyramid
101 doesn't expose it.
102 doesn't expose it.
102 """
103 """
103 return self.request.environ['beaker.session']
104 return self.request.environ['beaker.session']
104
105
105
106
106 class TestRequest(Request):
107 class TestRequest(webob.BaseRequest):
107
108
108 # for py.test
109 # for py.test
109 disabled = True
110 disabled = True
110 ResponseClass = CustomTestResponse
111 ResponseClass = CustomTestResponse
111
112
112
113
113 class CustomTestApp(TestApp):
114 class CustomTestApp(TestApp):
114 """
115 """
115 Custom app to make mustcontain more useful
116 Custom app to make mustcontain more useful
116 """
117 """
117 RequestClass = TestRequest
118 RequestClass = TestRequest
118
119
119
120
120 def set_anonymous_access(enabled):
121 def set_anonymous_access(enabled):
121 """(Dis)allows anonymous access depending on parameter `enabled`"""
122 """(Dis)allows anonymous access depending on parameter `enabled`"""
122 user = User.get_default_user()
123 user = User.get_default_user()
123 user.active = enabled
124 user.active = enabled
124 Session().add(user)
125 Session().add(user)
125 Session().commit()
126 Session().commit()
126 time.sleep(1.5) # must sleep for cache (1s to expire)
127 time.sleep(1.5) # must sleep for cache (1s to expire)
127 log.info('anonymous access is now: %s', enabled)
128 log.info('anonymous access is now: %s', enabled)
128 assert enabled == User.get_default_user().active, (
129 assert enabled == User.get_default_user().active, (
129 'Cannot set anonymous access')
130 'Cannot set anonymous access')
130
131
131
132
132 def check_xfail_backends(node, backend_alias):
133 def check_xfail_backends(node, backend_alias):
133 # Using "xfail_backends" here intentionally, since this marks work
134 # Using "xfail_backends" here intentionally, since this marks work
134 # which is "to be done" soon.
135 # which is "to be done" soon.
135 skip_marker = node.get_marker('xfail_backends')
136 skip_marker = node.get_marker('xfail_backends')
136 if skip_marker and backend_alias in skip_marker.args:
137 if skip_marker and backend_alias in skip_marker.args:
137 msg = "Support for backend %s to be developed." % (backend_alias, )
138 msg = "Support for backend %s to be developed." % (backend_alias, )
138 msg = skip_marker.kwargs.get('reason', msg)
139 msg = skip_marker.kwargs.get('reason', msg)
139 pytest.xfail(msg)
140 pytest.xfail(msg)
140
141
141
142
142 def check_skip_backends(node, backend_alias):
143 def check_skip_backends(node, backend_alias):
143 # Using "skip_backends" here intentionally, since this marks work which is
144 # Using "skip_backends" here intentionally, since this marks work which is
144 # not supported.
145 # not supported.
145 skip_marker = node.get_marker('skip_backends')
146 skip_marker = node.get_marker('skip_backends')
146 if skip_marker and backend_alias in skip_marker.args:
147 if skip_marker and backend_alias in skip_marker.args:
147 msg = "Feature not supported for backend %s." % (backend_alias, )
148 msg = "Feature not supported for backend %s." % (backend_alias, )
148 msg = skip_marker.kwargs.get('reason', msg)
149 msg = skip_marker.kwargs.get('reason', msg)
149 pytest.skip(msg)
150 pytest.skip(msg)
150
151
151
152
152 def extract_git_repo_from_dump(dump_name, repo_name):
153 def extract_git_repo_from_dump(dump_name, repo_name):
153 """Create git repo `repo_name` from dump `dump_name`."""
154 """Create git repo `repo_name` from dump `dump_name`."""
154 repos_path = ScmModel().repos_path
155 repos_path = ScmModel().repos_path
155 target_path = os.path.join(repos_path, repo_name)
156 target_path = os.path.join(repos_path, repo_name)
156 rc_testdata.extract_git_dump(dump_name, target_path)
157 rc_testdata.extract_git_dump(dump_name, target_path)
157 return target_path
158 return target_path
158
159
159
160
160 def extract_hg_repo_from_dump(dump_name, repo_name):
161 def extract_hg_repo_from_dump(dump_name, repo_name):
161 """Create hg repo `repo_name` from dump `dump_name`."""
162 """Create hg repo `repo_name` from dump `dump_name`."""
162 repos_path = ScmModel().repos_path
163 repos_path = ScmModel().repos_path
163 target_path = os.path.join(repos_path, repo_name)
164 target_path = os.path.join(repos_path, repo_name)
164 rc_testdata.extract_hg_dump(dump_name, target_path)
165 rc_testdata.extract_hg_dump(dump_name, target_path)
165 return target_path
166 return target_path
166
167
167
168
168 def extract_svn_repo_from_dump(dump_name, repo_name):
169 def extract_svn_repo_from_dump(dump_name, repo_name):
169 """Create a svn repo `repo_name` from dump `dump_name`."""
170 """Create a svn repo `repo_name` from dump `dump_name`."""
170 repos_path = ScmModel().repos_path
171 repos_path = ScmModel().repos_path
171 target_path = os.path.join(repos_path, repo_name)
172 target_path = os.path.join(repos_path, repo_name)
172 SubversionRepository(target_path, create=True)
173 SubversionRepository(target_path, create=True)
173 _load_svn_dump_into_repo(dump_name, target_path)
174 _load_svn_dump_into_repo(dump_name, target_path)
174 return target_path
175 return target_path
175
176
176
177
177 def assert_message_in_log(log_records, message, levelno, module):
178 def assert_message_in_log(log_records, message, levelno, module):
178 messages = [
179 messages = [
179 r.message for r in log_records
180 r.message for r in log_records
180 if r.module == module and r.levelno == levelno
181 if r.module == module and r.levelno == levelno
181 ]
182 ]
182 assert message in messages
183 assert message in messages
183
184
184
185
185 def _load_svn_dump_into_repo(dump_name, repo_path):
186 def _load_svn_dump_into_repo(dump_name, repo_path):
186 """
187 """
187 Utility to populate a svn repository with a named dump
188 Utility to populate a svn repository with a named dump
188
189
189 Currently the dumps are in rc_testdata. They might later on be
190 Currently the dumps are in rc_testdata. They might later on be
190 integrated with the main repository once they stabilize more.
191 integrated with the main repository once they stabilize more.
191 """
192 """
192 dump = rc_testdata.load_svn_dump(dump_name)
193 dump = rc_testdata.load_svn_dump(dump_name)
193 load_dump = subprocess32.Popen(
194 load_dump = subprocess32.Popen(
194 ['svnadmin', 'load', repo_path],
195 ['svnadmin', 'load', repo_path],
195 stdin=subprocess32.PIPE, stdout=subprocess32.PIPE,
196 stdin=subprocess32.PIPE, stdout=subprocess32.PIPE,
196 stderr=subprocess32.PIPE)
197 stderr=subprocess32.PIPE)
197 out, err = load_dump.communicate(dump)
198 out, err = load_dump.communicate(dump)
198 if load_dump.returncode != 0:
199 if load_dump.returncode != 0:
199 log.error("Output of load_dump command: %s", out)
200 log.error("Output of load_dump command: %s", out)
200 log.error("Error output of load_dump command: %s", err)
201 log.error("Error output of load_dump command: %s", err)
201 raise Exception(
202 raise Exception(
202 'Failed to load dump "%s" into repository at path "%s".'
203 'Failed to load dump "%s" into repository at path "%s".'
203 % (dump_name, repo_path))
204 % (dump_name, repo_path))
204
205
205
206
class AssertResponse(object):
    """
    Utility that helps to assert things about a given HTML response.
    """

    def __init__(self, response):
        self.response = response

    def get_imports(self):
        return fromstring, tostring, CSSSelector

    def one_element_exists(self, css_selector):
        self.get_element(css_selector)

    def no_element_exists(self, css_selector):
        assert not self._get_elements(css_selector)

    def element_equals_to(self, css_selector, expected_content):
        element = self.get_element(css_selector)
        element_text = self._element_to_string(element)
        assert expected_content in element_text

    def element_contains(self, css_selector, expected_content):
        element = self.get_element(css_selector)
        assert expected_content in element.text_content()

    def element_value_contains(self, css_selector, expected_content):
        element = self.get_element(css_selector)
        assert expected_content in element.value

    def contains_one_link(self, link_text, href):
        fromstring, tostring, CSSSelector = self.get_imports()
        doc = fromstring(self.response.body)
        sel = CSSSelector('a[href]')
        elements = [
            e for e in sel(doc) if e.text_content().strip() == link_text]
        assert len(elements) == 1, "Did not find link or found multiple links"
        self._ensure_url_equal(elements[0].attrib.get('href'), href)

    def contains_one_anchor(self, anchor_id):
        fromstring, tostring, CSSSelector = self.get_imports()
        doc = fromstring(self.response.body)
        sel = CSSSelector('#' + anchor_id)
        elements = sel(doc)
        assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)

    def _ensure_url_equal(self, found, expected):
        assert _Url(found) == _Url(expected)

    def get_element(self, css_selector):
        elements = self._get_elements(css_selector)
        assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
        return elements[0]

    def get_elements(self, css_selector):
        return self._get_elements(css_selector)

    def _get_elements(self, css_selector):
        fromstring, tostring, CSSSelector = self.get_imports()
        doc = fromstring(self.response.body)
        sel = CSSSelector(css_selector)
        elements = sel(doc)
        return elements

    def _element_to_string(self, element):
        fromstring, tostring, CSSSelector = self.get_imports()
        return tostring(element)


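# A hedged usage sketch (not part of the helpers above): in a functional test
# the WebTest response is usually wrapped in AssertResponse so assertions run
# against the parsed HTML rather than raw response strings. The `app` fixture,
# the route and the CSS selectors below are made-up examples.
def _example_assert_response_usage(app):
    response = app.get('/some-repo')
    assert_response = AssertResponse(response)
    assert_response.one_element_exists('#summary')
    assert_response.element_contains('.title', 'some-repo')
    assert_response.contains_one_link('Changelog', '/some-repo/changelog')

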
class _Url(object):
    """
    A url object that can be compared with other url objects
    without regard to the vagaries of encoding, escaping, and ordering
    of parameters in query strings.

    Inspired by
    http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
    """

    def __init__(self, url):
        parts = urlparse(url)
        _query = frozenset(parse_qsl(parts.query))
        _path = unquote_plus(parts.path)
        parts = parts._replace(query=_query, path=_path)
        self.parts = parts

    def __eq__(self, other):
        return self.parts == other.parts

    def __hash__(self):
        return hash(self.parts)


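# Minimal illustration of the comparison semantics (the URLs are hypothetical):
# query-parameter ordering is ignored, while differing values still compare
# unequal. Only `==` is defined on _Url, so the negative case avoids `!=`.
def _example_url_comparison():
    assert _Url('/goto?a=1&b=2') == _Url('/goto?b=2&a=1')
    assert not (_Url('/goto?a=1') == _Url('/goto?a=2'))

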
def run_test_concurrently(times, raise_catched_exc=True):
    """
    Add this decorator to small pieces of code that you want to test
    concurrently.

    ex:

    @run_test_concurrently(25)
    def my_test_function():
        ...
    """
    def test_concurrently_decorator(test_func):
        def wrapper(*args, **kwargs):
            exceptions = []

            def call_test_func():
                try:
                    test_func(*args, **kwargs)
                except Exception as e:
                    exceptions.append(e)
                    if raise_catched_exc:
                        raise
            threads = []
            for i in range(times):
                threads.append(threading.Thread(target=call_test_func))
            for t in threads:
                t.start()
            for t in threads:
                t.join()
            if exceptions:
                raise Exception(
                    'test_concurrently intercepted %s exceptions: %s' % (
                        len(exceptions), exceptions))
        return wrapper
    return test_concurrently_decorator


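# A hedged example of applying the decorator above: the check itself is a
# placeholder, the point is that five threads execute it simultaneously and
# any exception raised in a worker thread fails the wrapped call.
@run_test_concurrently(5)
def _example_concurrent_check():
    value = sum(range(10))
    assert value == 45

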
def wait_for_url(url, timeout=10):
    """
    Wait until URL becomes reachable.

    It polls the URL until the timeout is reached or it becomes reachable.
    It will call `pytest.fail` in case the URL is not reachable.
    """
    timeout = time.time() + timeout
    last = 0
    wait = 0.1

    while timeout > last:
        last = time.time()
        if is_url_reachable(url):
            break
        elif (last + wait) > time.time():
            # Go to sleep because not enough time has passed since last check.
            time.sleep(wait)
        else:
            pytest.fail("Timeout while waiting for URL {}".format(url))


def is_url_reachable(url):
    try:
        urllib2.urlopen(url)
    except urllib2.URLError:
        return False
    return True


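# Sketch of how the two helpers above are typically paired. The URL and the
# idea of a server started elsewhere by a fixture are assumptions made only
# for illustration.
def _example_wait_for_test_server():
    server_url = 'http://127.0.0.1:9900'  # hypothetical server address
    if not is_url_reachable(server_url):
        wait_for_url(server_url, timeout=30)

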
def repo_on_filesystem(repo_name):
    from rhodecode.lib import vcs
    from rhodecode.tests import TESTS_TMP_PATH
    repo = vcs.get_vcs_instance(
        os.path.join(TESTS_TMP_PATH, repo_name), create=False)
    return repo is not None


def commit_change(
        repo, filename, content, message, vcs_type, parent=None, newfile=False):
    from rhodecode.tests import TEST_USER_ADMIN_LOGIN

    repo = Repository.get_by_repo_name(repo)
    _commit = parent
    if not parent:
        _commit = EmptyCommit(alias=vcs_type)

    if newfile:
        nodes = {
            filename: {
                'content': content
            }
        }
        commit = ScmModel().create_nodes(
            user=TEST_USER_ADMIN_LOGIN, repo=repo,
            message=message,
            nodes=nodes,
            parent_commit=_commit,
            author=TEST_USER_ADMIN_LOGIN,
        )
    else:
        commit = ScmModel().commit_change(
            repo=repo.scm_instance(), repo_name=repo.repo_name,
            commit=parent, user=TEST_USER_ADMIN_LOGIN,
            author=TEST_USER_ADMIN_LOGIN,
            message=message,
            content=content,
            f_path=filename
        )
    return commit


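# Hedged sketch of chaining commit_change to build a small two-commit history;
# the repository name, file name and vcs_type value are illustrative
# assumptions, not fixtures guaranteed to exist.
def _example_commit_chain():
    first = commit_change(
        'vcs_test_git', filename='README', content='first version',
        message='add README', vcs_type='git', newfile=True)
    return commit_change(
        'vcs_test_git', filename='README', content='second version',
        message='update README', vcs_type='git', parent=first)

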
def add_test_routes(config):
    """
    Adds test routes that can be used in different functional tests
    """
    config.add_route(name='home', pattern='/')
    config.add_route(name='repo_summary', pattern='/{repo_name}')
    config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
    config.add_route(name='repo_group_home', pattern='/{repo_group_name}')

    config.add_route(name='pullrequest_show',
                     pattern='/{repo_name}/pull-request/{pull_request_id}')
    config.add_route(name='pull_requests_global',
                     pattern='/pull-request/{pull_request_id}')
    config.add_route(name='repo_commit',
                     pattern='/{repo_name}/changeset/{commit_id}')
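

# Hedged example of wiring these routes into a bare Pyramid Configurator, for
# tests that need URL generation without the full application setup.
def _example_minimal_route_config():
    from pyramid.config import Configurator
    config = Configurator()
    add_test_routes(config)
    return config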