@@ -1,580 +1,580 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | import urlparse | |
|
21 | import urllib.parse | |
|
22 | 22 | |
|
23 | 23 | import mock |
|
24 | 24 | import pytest |
|
25 | 25 | |
|
26 | 26 | from rhodecode.tests import ( |
|
27 | 27 | assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN, |
|
28 | 28 | no_newline_id_generator) |
|
29 | 29 | from rhodecode.tests.fixture import Fixture |
|
30 | 30 | from rhodecode.lib.auth import check_password |
|
31 | 31 | from rhodecode.lib import helpers as h |
|
32 | 32 | from rhodecode.model.auth_token import AuthTokenModel |
|
33 | 33 | from rhodecode.model.db import User, Notification, UserApiKeys |
|
34 | 34 | from rhodecode.model.meta import Session |
|
35 | 35 | |
|
36 | 36 | fixture = Fixture() |
|
37 | 37 | |
|
38 | 38 | whitelist_view = ['RepoCommitsView:repo_commit_raw'] |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | def route_path(name, params=None, **kwargs): |
|
42 | 42 | import urllib.request, urllib.parse, urllib.error |
|
43 | 43 | from rhodecode.apps._base import ADMIN_PREFIX |
|
44 | 44 | |
|
45 | 45 | base_url = { |
|
46 | 46 | 'login': ADMIN_PREFIX + '/login', |
|
47 | 47 | 'logout': ADMIN_PREFIX + '/logout', |
|
48 | 48 | 'register': ADMIN_PREFIX + '/register', |
|
49 | 49 | 'reset_password': |
|
50 | 50 | ADMIN_PREFIX + '/password_reset', |
|
51 | 51 | 'reset_password_confirmation': |
|
52 | 52 | ADMIN_PREFIX + '/password_reset_confirmation', |
|
53 | 53 | |
|
54 | 54 | 'admin_permissions_application': |
|
55 | 55 | ADMIN_PREFIX + '/permissions/application', |
|
56 | 56 | 'admin_permissions_application_update': |
|
57 | 57 | ADMIN_PREFIX + '/permissions/application/update', |
|
58 | 58 | |
|
59 | 59 | 'repo_commit_raw': '/{repo_name}/raw-changeset/{commit_id}' |
|
60 | 60 | |
|
61 | 61 | }[name].format(**kwargs) |
|
62 | 62 | |
|
63 | 63 | if params: |
|
64 | 64 | base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params)) |
|
65 | 65 | return base_url |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | @pytest.mark.usefixtures('app') |
|
69 | 69 | class TestLoginController(object): |
|
70 | 70 | destroy_users = set() |
|
71 | 71 | |
|
72 | 72 | @classmethod |
|
73 | 73 | def teardown_class(cls): |
|
74 | 74 | fixture.destroy_users(cls.destroy_users) |
|
75 | 75 | |
|
76 | 76 | def teardown_method(self, method): |
|
77 | 77 | for n in Notification.query().all(): |
|
78 | 78 | Session().delete(n) |
|
79 | 79 | |
|
80 | 80 | Session().commit() |
|
81 | 81 | assert Notification.query().all() == [] |
|
82 | 82 | |
|
83 | 83 | def test_index(self): |
|
84 | 84 | response = self.app.get(route_path('login')) |
|
85 | 85 | assert response.status == '200 OK' |
|
86 | 86 | # Test response... |
|
87 | 87 | |
|
88 | 88 | def test_login_admin_ok(self): |
|
89 | 89 | response = self.app.post(route_path('login'), |
|
90 | 90 | {'username': 'test_admin', |
|
91 | 91 | 'password': 'test12'}, status=302) |
|
92 | 92 | response = response.follow() |
|
93 | 93 | session = response.get_session_from_response() |
|
94 | 94 | username = session['rhodecode_user'].get('username') |
|
95 | 95 | assert username == 'test_admin' |
|
96 | 96 | response.mustcontain('logout') |
|
97 | 97 | |
|
98 | 98 | def test_login_regular_ok(self): |
|
99 | 99 | response = self.app.post(route_path('login'), |
|
100 | 100 | {'username': 'test_regular', |
|
101 | 101 | 'password': 'test12'}, status=302) |
|
102 | 102 | |
|
103 | 103 | response = response.follow() |
|
104 | 104 | session = response.get_session_from_response() |
|
105 | 105 | username = session['rhodecode_user'].get('username') |
|
106 | 106 | assert username == 'test_regular' |
|
107 | 107 | response.mustcontain('logout') |
|
108 | 108 | |
|
109 | 109 | def test_login_regular_forbidden_when_super_admin_restriction(self): |
|
110 | 110 | from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin |
|
111 | 111 | with fixture.auth_restriction(self.app._pyramid_registry, |
|
112 | 112 | RhodeCodeAuthPlugin.AUTH_RESTRICTION_SUPER_ADMIN): |
|
113 | 113 | response = self.app.post(route_path('login'), |
|
114 | 114 | {'username': 'test_regular', |
|
115 | 115 | 'password': 'test12'}) |
|
116 | 116 | |
|
117 | 117 | response.mustcontain('invalid user name') |
|
118 | 118 | response.mustcontain('invalid password') |
|
119 | 119 | |
|
120 | 120 | def test_login_regular_forbidden_when_scope_restriction(self): |
|
121 | 121 | from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin |
|
122 | 122 | with fixture.scope_restriction(self.app._pyramid_registry, |
|
123 | 123 | RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_VCS): |
|
124 | 124 | response = self.app.post(route_path('login'), |
|
125 | 125 | {'username': 'test_regular', |
|
126 | 126 | 'password': 'test12'}) |
|
127 | 127 | |
|
128 | 128 | response.mustcontain('invalid user name') |
|
129 | 129 | response.mustcontain('invalid password') |
|
130 | 130 | |
|
131 | 131 | def test_login_ok_came_from(self): |
|
132 | 132 | test_came_from = '/_admin/users?branch=stable' |
|
133 | 133 | _url = '{}?came_from={}'.format(route_path('login'), test_came_from) |
|
134 | 134 | response = self.app.post( |
|
135 | 135 | _url, {'username': 'test_admin', 'password': 'test12'}, status=302) |
|
136 | 136 | |
|
137 | 137 | assert 'branch=stable' in response.location |
|
138 | 138 | response = response.follow() |
|
139 | 139 | |
|
140 | 140 | assert response.status == '200 OK' |
|
141 | 141 | response.mustcontain('Users administration') |
|
142 | 142 | |
|
143 | 143 | def test_redirect_to_login_with_get_args(self): |
|
144 | 144 | with fixture.anon_access(False): |
|
145 | 145 | kwargs = {'branch': 'stable'} |
|
146 | 146 | response = self.app.get( |
|
147 | 147 | h.route_path('repo_summary', repo_name=HG_REPO, _query=kwargs), |
|
148 | 148 | status=302) |
|
149 | 149 | |
|
150 | response_query = urlparse.parse_qsl(response.location) | |
|
150 | response_query = urllib.parse.parse_qsl(response.location) |
|
151 | 151 | assert 'branch=stable' in response_query[0][1] |
|
152 | 152 | |
|
153 | 153 | def test_login_form_with_get_args(self): |
|
154 | 154 | _url = '{}?came_from=/_admin/users,branch=stable'.format(route_path('login')) |
|
155 | 155 | response = self.app.get(_url) |
|
156 | 156 | assert 'branch%3Dstable' in response.form.action |
|
157 | 157 | |
|
158 | 158 | @pytest.mark.parametrize("url_came_from", [ |
|
159 | 159 | 'data:text/html,<script>window.alert("xss")</script>', |
|
160 | 160 | 'mailto:test@rhodecode.org', |
|
161 | 161 | 'file:///etc/passwd', |
|
162 | 162 | 'ftp://some.ftp.server', |
|
163 | 163 | 'http://other.domain', |
|
164 | 164 | '/\r\nX-Forwarded-Host: http://example.org', |
|
165 | 165 | ], ids=no_newline_id_generator) |
|
166 | 166 | def test_login_bad_came_froms(self, url_came_from): |
|
167 | 167 | _url = '{}?came_from={}'.format(route_path('login'), url_came_from) |
|
168 | 168 | response = self.app.post( |
|
169 | 169 | _url, |
|
170 | 170 | {'username': 'test_admin', 'password': 'test12'}) |
|
171 | 171 | assert response.status == '302 Found' |
|
172 | 172 | response = response.follow() |
|
173 | 173 | assert response.status == '200 OK' |
|
174 | 174 | assert response.request.path == '/' |
|
175 | 175 | |
|
176 | 176 | def test_login_short_password(self): |
|
177 | 177 | response = self.app.post(route_path('login'), |
|
178 | 178 | {'username': 'test_admin', |
|
179 | 179 | 'password': 'as'}) |
|
180 | 180 | assert response.status == '200 OK' |
|
181 | 181 | |
|
182 | 182 | response.mustcontain('Enter 3 characters or more') |
|
183 | 183 | |
|
184 | 184 | def test_login_wrong_non_ascii_password(self, user_regular): |
|
185 | 185 | response = self.app.post( |
|
186 | 186 | route_path('login'), |
|
187 | 187 | {'username': user_regular.username, |
|
188 | 188 | 'password': u'invalid-non-asci\xe4'.encode('utf8')}) |
|
189 | 189 | |
|
190 | 190 | response.mustcontain('invalid user name') |
|
191 | 191 | response.mustcontain('invalid password') |
|
192 | 192 | |
|
193 | 193 | def test_login_with_non_ascii_password(self, user_util): |
|
194 | 194 | password = u'valid-non-ascii\xe4' |
|
195 | 195 | user = user_util.create_user(password=password) |
|
196 | 196 | response = self.app.post( |
|
197 | 197 | route_path('login'), |
|
198 | 198 | {'username': user.username, |
|
199 | 199 | 'password': password.encode('utf-8')}) |
|
200 | 200 | assert response.status_code == 302 |
|
201 | 201 | |
|
202 | 202 | def test_login_wrong_username_password(self): |
|
203 | 203 | response = self.app.post(route_path('login'), |
|
204 | 204 | {'username': 'error', |
|
205 | 205 | 'password': 'test12'}) |
|
206 | 206 | |
|
207 | 207 | response.mustcontain('invalid user name') |
|
208 | 208 | response.mustcontain('invalid password') |
|
209 | 209 | |
|
210 | 210 | def test_login_admin_ok_password_migration(self, real_crypto_backend): |
|
211 | 211 | from rhodecode.lib import auth |
|
212 | 212 | |
|
213 | 213 | # create new user, with sha256 password |
|
214 | 214 | temp_user = 'test_admin_sha256' |
|
215 | 215 | user = fixture.create_user(temp_user) |
|
216 | 216 | user.password = auth._RhodeCodeCryptoSha256().hash_create( |
|
217 | 217 | b'test123') |
|
218 | 218 | Session().add(user) |
|
219 | 219 | Session().commit() |
|
220 | 220 | self.destroy_users.add(temp_user) |
|
221 | 221 | response = self.app.post(route_path('login'), |
|
222 | 222 | {'username': temp_user, |
|
223 | 223 | 'password': 'test123'}, status=302) |
|
224 | 224 | |
|
225 | 225 | response = response.follow() |
|
226 | 226 | session = response.get_session_from_response() |
|
227 | 227 | username = session['rhodecode_user'].get('username') |
|
228 | 228 | assert username == temp_user |
|
229 | 229 | response.mustcontain('logout') |
|
230 | 230 | |
|
231 | 231 | # new password should be bcrypted, after log-in and transfer |
|
232 | 232 | user = User.get_by_username(temp_user) |
|
233 | 233 | assert user.password.startswith('$') |
|
234 | 234 | |
|
235 | 235 | # REGISTRATIONS |
|
236 | 236 | def test_register(self): |
|
237 | 237 | response = self.app.get(route_path('register')) |
|
238 | 238 | response.mustcontain('Create an Account') |
|
239 | 239 | |
|
240 | 240 | def test_register_err_same_username(self): |
|
241 | 241 | uname = 'test_admin' |
|
242 | 242 | response = self.app.post( |
|
243 | 243 | route_path('register'), |
|
244 | 244 | { |
|
245 | 245 | 'username': uname, |
|
246 | 246 | 'password': 'test12', |
|
247 | 247 | 'password_confirmation': 'test12', |
|
248 | 248 | 'email': 'goodmail@domain.com', |
|
249 | 249 | 'firstname': 'test', |
|
250 | 250 | 'lastname': 'test' |
|
251 | 251 | } |
|
252 | 252 | ) |
|
253 | 253 | |
|
254 | 254 | assertr = response.assert_response() |
|
255 | 255 | msg = 'Username "%(username)s" already exists' |
|
256 | 256 | msg = msg % {'username': uname} |
|
257 | 257 | assertr.element_contains('#username+.error-message', msg) |
|
258 | 258 | |
|
259 | 259 | def test_register_err_same_email(self): |
|
260 | 260 | response = self.app.post( |
|
261 | 261 | route_path('register'), |
|
262 | 262 | { |
|
263 | 263 | 'username': 'test_admin_0', |
|
264 | 264 | 'password': 'test12', |
|
265 | 265 | 'password_confirmation': 'test12', |
|
266 | 266 | 'email': 'test_admin@mail.com', |
|
267 | 267 | 'firstname': 'test', |
|
268 | 268 | 'lastname': 'test' |
|
269 | 269 | } |
|
270 | 270 | ) |
|
271 | 271 | |
|
272 | 272 | assertr = response.assert_response() |
|
273 | 273 | msg = u'This e-mail address is already taken' |
|
274 | 274 | assertr.element_contains('#email+.error-message', msg) |
|
275 | 275 | |
|
276 | 276 | def test_register_err_same_email_case_sensitive(self): |
|
277 | 277 | response = self.app.post( |
|
278 | 278 | route_path('register'), |
|
279 | 279 | { |
|
280 | 280 | 'username': 'test_admin_1', |
|
281 | 281 | 'password': 'test12', |
|
282 | 282 | 'password_confirmation': 'test12', |
|
283 | 283 | 'email': 'TesT_Admin@mail.COM', |
|
284 | 284 | 'firstname': 'test', |
|
285 | 285 | 'lastname': 'test' |
|
286 | 286 | } |
|
287 | 287 | ) |
|
288 | 288 | assertr = response.assert_response() |
|
289 | 289 | msg = u'This e-mail address is already taken' |
|
290 | 290 | assertr.element_contains('#email+.error-message', msg) |
|
291 | 291 | |
|
292 | 292 | def test_register_err_wrong_data(self): |
|
293 | 293 | response = self.app.post( |
|
294 | 294 | route_path('register'), |
|
295 | 295 | { |
|
296 | 296 | 'username': 'xs', |
|
297 | 297 | 'password': 'test', |
|
298 | 298 | 'password_confirmation': 'test', |
|
299 | 299 | 'email': 'goodmailm', |
|
300 | 300 | 'firstname': 'test', |
|
301 | 301 | 'lastname': 'test' |
|
302 | 302 | } |
|
303 | 303 | ) |
|
304 | 304 | assert response.status == '200 OK' |
|
305 | 305 | response.mustcontain('An email address must contain a single @') |
|
306 | 306 | response.mustcontain('Enter a value 6 characters long or more') |
|
307 | 307 | |
|
308 | 308 | def test_register_err_username(self): |
|
309 | 309 | response = self.app.post( |
|
310 | 310 | route_path('register'), |
|
311 | 311 | { |
|
312 | 312 | 'username': 'error user', |
|
313 | 313 | 'password': 'test12', |
|
314 | 314 | 'password_confirmation': 'test12', |
|
315 | 315 | 'email': 'goodmailm', |
|
316 | 316 | 'firstname': 'test', |
|
317 | 317 | 'lastname': 'test' |
|
318 | 318 | } |
|
319 | 319 | ) |
|
320 | 320 | |
|
321 | 321 | response.mustcontain('An email address must contain a single @') |
|
322 | 322 | response.mustcontain( |
|
323 | 323 | 'Username may only contain ' |
|
324 | 324 | 'alphanumeric characters underscores, ' |
|
325 | 325 | 'periods or dashes and must begin with ' |
|
326 | 326 | 'alphanumeric character') |
|
327 | 327 | |
|
328 | 328 | def test_register_err_case_sensitive(self): |
|
329 | 329 | usr = 'Test_Admin' |
|
330 | 330 | response = self.app.post( |
|
331 | 331 | route_path('register'), |
|
332 | 332 | { |
|
333 | 333 | 'username': usr, |
|
334 | 334 | 'password': 'test12', |
|
335 | 335 | 'password_confirmation': 'test12', |
|
336 | 336 | 'email': 'goodmailm', |
|
337 | 337 | 'firstname': 'test', |
|
338 | 338 | 'lastname': 'test' |
|
339 | 339 | } |
|
340 | 340 | ) |
|
341 | 341 | |
|
342 | 342 | assertr = response.assert_response() |
|
343 | 343 | msg = u'Username "%(username)s" already exists' |
|
344 | 344 | msg = msg % {'username': usr} |
|
345 | 345 | assertr.element_contains('#username+.error-message', msg) |
|
346 | 346 | |
|
347 | 347 | def test_register_special_chars(self): |
|
348 | 348 | response = self.app.post( |
|
349 | 349 | route_path('register'), |
|
350 | 350 | { |
|
351 | 351 | 'username': 'xxxaxn', |
|
352 | 352 | 'password': 'ąćźżąśśśś', |
|
353 | 353 | 'password_confirmation': 'ąćźżąśśśś', |
|
354 | 354 | 'email': 'goodmailm@test.plx', |
|
355 | 355 | 'firstname': 'test', |
|
356 | 356 | 'lastname': 'test' |
|
357 | 357 | } |
|
358 | 358 | ) |
|
359 | 359 | |
|
360 | 360 | msg = u'Invalid characters (non-ascii) in password' |
|
361 | 361 | response.mustcontain(msg) |
|
362 | 362 | |
|
363 | 363 | def test_register_password_mismatch(self): |
|
364 | 364 | response = self.app.post( |
|
365 | 365 | route_path('register'), |
|
366 | 366 | { |
|
367 | 367 | 'username': 'xs', |
|
368 | 368 | 'password': '123qwe', |
|
369 | 369 | 'password_confirmation': 'qwe123', |
|
370 | 370 | 'email': 'goodmailm@test.plxa', |
|
371 | 371 | 'firstname': 'test', |
|
372 | 372 | 'lastname': 'test' |
|
373 | 373 | } |
|
374 | 374 | ) |
|
375 | 375 | msg = u'Passwords do not match' |
|
376 | 376 | response.mustcontain(msg) |
|
377 | 377 | |
|
378 | 378 | def test_register_ok(self): |
|
379 | 379 | username = 'test_regular4' |
|
380 | 380 | password = 'qweqwe' |
|
381 | 381 | email = 'marcin@test.com' |
|
382 | 382 | name = 'testname' |
|
383 | 383 | lastname = 'testlastname' |
|
384 | 384 | |
|
385 | 385 | # this initializes a session |
|
386 | 386 | response = self.app.get(route_path('register')) |
|
387 | 387 | response.mustcontain('Create an Account') |
|
388 | 388 | |
|
389 | 389 | |
|
390 | 390 | response = self.app.post( |
|
391 | 391 | route_path('register'), |
|
392 | 392 | { |
|
393 | 393 | 'username': username, |
|
394 | 394 | 'password': password, |
|
395 | 395 | 'password_confirmation': password, |
|
396 | 396 | 'email': email, |
|
397 | 397 | 'firstname': name, |
|
398 | 398 | 'lastname': lastname, |
|
399 | 399 | 'admin': True |
|
400 | 400 | }, |
|
401 | 401 | status=302 |
|
402 | 402 | ) # This should be overridden |
|
403 | 403 | |
|
404 | 404 | assert_session_flash( |
|
405 | 405 | response, 'You have successfully registered with RhodeCode. You can log-in now.') |
|
406 | 406 | |
|
407 | 407 | ret = Session().query(User).filter( |
|
408 | 408 | User.username == 'test_regular4').one() |
|
409 | 409 | assert ret.username == username |
|
410 | 410 | assert check_password(password, ret.password) |
|
411 | 411 | assert ret.email == email |
|
412 | 412 | assert ret.name == name |
|
413 | 413 | assert ret.lastname == lastname |
|
414 | 414 | assert ret.auth_tokens is not None |
|
415 | 415 | assert not ret.admin |
|
416 | 416 | |
|
417 | 417 | def test_forgot_password_wrong_mail(self): |
|
418 | 418 | bad_email = 'marcin@wrongmail.org' |
|
419 | 419 | # this initializes a session |
|
420 | 420 | self.app.get(route_path('reset_password')) |
|
421 | 421 | |
|
422 | 422 | response = self.app.post( |
|
423 | 423 | route_path('reset_password'), {'email': bad_email, } |
|
424 | 424 | ) |
|
425 | 425 | assert_session_flash(response, |
|
426 | 426 | 'If such email exists, a password reset link was sent to it.') |
|
427 | 427 | |
|
428 | 428 | def test_forgot_password(self, user_util): |
|
429 | 429 | # this initializes a session |
|
430 | 430 | self.app.get(route_path('reset_password')) |
|
431 | 431 | |
|
432 | 432 | user = user_util.create_user() |
|
433 | 433 | user_id = user.user_id |
|
434 | 434 | email = user.email |
|
435 | 435 | |
|
436 | 436 | response = self.app.post(route_path('reset_password'), {'email': email, }) |
|
437 | 437 | |
|
438 | 438 | assert_session_flash(response, |
|
439 | 439 | 'If such email exists, a password reset link was sent to it.') |
|
440 | 440 | |
|
441 | 441 | # BAD KEY |
|
442 | 442 | confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), 'badkey') |
|
443 | 443 | response = self.app.get(confirm_url, status=302) |
|
444 | 444 | assert response.location.endswith(route_path('reset_password')) |
|
445 | 445 | assert_session_flash(response, 'Given reset token is invalid') |
|
446 | 446 | |
|
447 | 447 | response.follow() # cleanup flash |
|
448 | 448 | |
|
449 | 449 | # GOOD KEY |
|
450 | 450 | key = UserApiKeys.query()\ |
|
451 | 451 | .filter(UserApiKeys.user_id == user_id)\ |
|
452 | 452 | .filter(UserApiKeys.role == UserApiKeys.ROLE_PASSWORD_RESET)\ |
|
453 | 453 | .first() |
|
454 | 454 | |
|
455 | 455 | assert key |
|
456 | 456 | |
|
457 | 457 | confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), key.api_key) |
|
458 | 458 | response = self.app.get(confirm_url) |
|
459 | 459 | assert response.status == '302 Found' |
|
460 | 460 | assert response.location.endswith(route_path('login')) |
|
461 | 461 | |
|
462 | 462 | assert_session_flash( |
|
463 | 463 | response, |
|
464 | 464 | 'Your password reset was successful, ' |
|
465 | 465 | 'a new password has been sent to your email') |
|
466 | 466 | |
|
467 | 467 | response.follow() |
|
468 | 468 | |
|
469 | 469 | def _get_api_whitelist(self, values=None): |
|
470 | 470 | config = {'api_access_controllers_whitelist': values or []} |
|
471 | 471 | return config |
|
472 | 472 | |
|
473 | 473 | @pytest.mark.parametrize("test_name, auth_token", [ |
|
474 | 474 | ('none', None), |
|
475 | 475 | ('empty_string', ''), |
|
476 | 476 | ('fake_number', '123456'), |
|
477 | 477 | ('proper_auth_token', None) |
|
478 | 478 | ]) |
|
479 | 479 | def test_access_not_whitelisted_page_via_auth_token( |
|
480 | 480 | self, test_name, auth_token, user_admin): |
|
481 | 481 | |
|
482 | 482 | whitelist = self._get_api_whitelist([]) |
|
483 | 483 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
484 | 484 | assert [] == whitelist['api_access_controllers_whitelist'] |
|
485 | 485 | if test_name == 'proper_auth_token': |
|
486 | 486 | # use builtin if api_key is None |
|
487 | 487 | auth_token = user_admin.api_key |
|
488 | 488 | |
|
489 | 489 | with fixture.anon_access(False): |
|
490 | 490 | self.app.get( |
|
491 | 491 | route_path('repo_commit_raw', |
|
492 | 492 | repo_name=HG_REPO, commit_id='tip', |
|
493 | 493 | params=dict(api_key=auth_token)), |
|
494 | 494 | status=302) |
|
495 | 495 | |
|
496 | 496 | @pytest.mark.parametrize("test_name, auth_token, code", [ |
|
497 | 497 | ('none', None, 302), |
|
498 | 498 | ('empty_string', '', 302), |
|
499 | 499 | ('fake_number', '123456', 302), |
|
500 | 500 | ('proper_auth_token', None, 200) |
|
501 | 501 | ]) |
|
502 | 502 | def test_access_whitelisted_page_via_auth_token( |
|
503 | 503 | self, test_name, auth_token, code, user_admin): |
|
504 | 504 | |
|
505 | 505 | whitelist = self._get_api_whitelist(whitelist_view) |
|
506 | 506 | |
|
507 | 507 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
508 | 508 | assert whitelist_view == whitelist['api_access_controllers_whitelist'] |
|
509 | 509 | |
|
510 | 510 | if test_name == 'proper_auth_token': |
|
511 | 511 | auth_token = user_admin.api_key |
|
512 | 512 | assert auth_token |
|
513 | 513 | |
|
514 | 514 | with fixture.anon_access(False): |
|
515 | 515 | self.app.get( |
|
516 | 516 | route_path('repo_commit_raw', |
|
517 | 517 | repo_name=HG_REPO, commit_id='tip', |
|
518 | 518 | params=dict(api_key=auth_token)), |
|
519 | 519 | status=code) |
|
520 | 520 | |
|
521 | 521 | @pytest.mark.parametrize("test_name, auth_token, code", [ |
|
522 | 522 | ('proper_auth_token', None, 200), |
|
523 | 523 | ('wrong_auth_token', '123456', 302), |
|
524 | 524 | ]) |
|
525 | 525 | def test_access_whitelisted_page_via_auth_token_bound_to_token( |
|
526 | 526 | self, test_name, auth_token, code, user_admin): |
|
527 | 527 | |
|
528 | 528 | expected_token = auth_token |
|
529 | 529 | if test_name == 'proper_auth_token': |
|
530 | 530 | auth_token = user_admin.api_key |
|
531 | 531 | expected_token = auth_token |
|
532 | 532 | assert auth_token |
|
533 | 533 | |
|
534 | 534 | whitelist = self._get_api_whitelist([ |
|
535 | 535 | 'RepoCommitsView:repo_commit_raw@{}'.format(expected_token)]) |
|
536 | 536 | |
|
537 | 537 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
538 | 538 | |
|
539 | 539 | with fixture.anon_access(False): |
|
540 | 540 | self.app.get( |
|
541 | 541 | route_path('repo_commit_raw', |
|
542 | 542 | repo_name=HG_REPO, commit_id='tip', |
|
543 | 543 | params=dict(api_key=auth_token)), |
|
544 | 544 | status=code) |
|
545 | 545 | |
|
546 | 546 | def test_access_page_via_extra_auth_token(self): |
|
547 | 547 | whitelist = self._get_api_whitelist(whitelist_view) |
|
548 | 548 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
549 | 549 | assert whitelist_view == \ |
|
550 | 550 | whitelist['api_access_controllers_whitelist'] |
|
551 | 551 | |
|
552 | 552 | new_auth_token = AuthTokenModel().create( |
|
553 | 553 | TEST_USER_ADMIN_LOGIN, 'test') |
|
554 | 554 | Session().commit() |
|
555 | 555 | with fixture.anon_access(False): |
|
556 | 556 | self.app.get( |
|
557 | 557 | route_path('repo_commit_raw', |
|
558 | 558 | repo_name=HG_REPO, commit_id='tip', |
|
559 | 559 | params=dict(api_key=new_auth_token.api_key)), |
|
560 | 560 | status=200) |
|
561 | 561 | |
|
562 | 562 | def test_access_page_via_expired_auth_token(self): |
|
563 | 563 | whitelist = self._get_api_whitelist(whitelist_view) |
|
564 | 564 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
565 | 565 | assert whitelist_view == \ |
|
566 | 566 | whitelist['api_access_controllers_whitelist'] |
|
567 | 567 | |
|
568 | 568 | new_auth_token = AuthTokenModel().create( |
|
569 | 569 | TEST_USER_ADMIN_LOGIN, 'test') |
|
570 | 570 | Session().commit() |
|
571 | 571 | # patch the api key and make it expired |
|
572 | 572 | new_auth_token.expires = 0 |
|
573 | 573 | Session().add(new_auth_token) |
|
574 | 574 | Session().commit() |
|
575 | 575 | with fixture.anon_access(False): |
|
576 | 576 | self.app.get( |
|
577 | 577 | route_path('repo_commit_raw', |
|
578 | 578 | repo_name=HG_REPO, commit_id='tip', |
|
579 | 579 | params=dict(api_key=new_auth_token.api_key)), |
|
580 | 580 | status=302) |
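Reviewer note on the hunk above: the test module drops the Python 2 `urlparse` import in favour of `urllib.parse`. As a minimal reference sketch (illustrative only, not part of the changeset; assumes Python 3), the helpers these tests rely on are module-level functions of `urllib.parse`:

```python
import urllib.parse

# Build a query string the way route_path() does.
params = {'branch': 'stable'}
query = urllib.parse.urlencode(params)        # 'branch=stable'
url = '/_admin/users?{}'.format(query)

# parse_qsl and urlparse are functions on the urllib.parse module itself,
# not attributes of urllib.parse.urlparse (which is a function, not a module).
assert urllib.parse.parse_qsl(query) == [('branch', 'stable')]
assert urllib.parse.urlparse(url).query == 'branch=stable'
```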
@@ -1,470 +1,470 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import time |
|
22 | 22 | import collections |
|
23 | 23 | import datetime |
|
24 | 24 | import formencode |
|
25 | 25 | import formencode.htmlfill |
|
26 | 26 | import logging |
|
27 | import urlparse | |
|
27 | import urllib.parse | |
|
28 | 28 | import requests |
|
29 | 29 | |
|
30 | 30 | from pyramid.httpexceptions import HTTPFound |
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | from rhodecode.apps._base import BaseAppView |
|
34 | 34 | from rhodecode.authentication.base import authenticate, HTTP_TYPE |
|
35 | 35 | from rhodecode.authentication.plugins import auth_rhodecode |
|
36 | 36 | from rhodecode.events import UserRegistered, trigger |
|
37 | 37 | from rhodecode.lib import helpers as h |
|
38 | 38 | from rhodecode.lib import audit_logger |
|
39 | 39 | from rhodecode.lib.auth import ( |
|
40 | 40 | AuthUser, HasPermissionAnyDecorator, CSRFRequired) |
|
41 | 41 | from rhodecode.lib.base import get_ip_addr |
|
42 | 42 | from rhodecode.lib.exceptions import UserCreationError |
|
43 | 43 | from rhodecode.lib.utils2 import safe_str |
|
44 | 44 | from rhodecode.model.db import User, UserApiKeys |
|
45 | 45 | from rhodecode.model.forms import LoginForm, RegisterForm, PasswordResetForm |
|
46 | 46 | from rhodecode.model.meta import Session |
|
47 | 47 | from rhodecode.model.auth_token import AuthTokenModel |
|
48 | 48 | from rhodecode.model.settings import SettingsModel |
|
49 | 49 | from rhodecode.model.user import UserModel |
|
50 | 50 | from rhodecode.translation import _ |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | log = logging.getLogger(__name__) |
|
54 | 54 | |
|
55 | 55 | CaptchaData = collections.namedtuple( |
|
56 | 56 | 'CaptchaData', 'active, private_key, public_key') |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | def store_user_in_session(session, username, remember=False): |
|
60 | 60 | user = User.get_by_username(username, case_insensitive=True) |
|
61 | 61 | auth_user = AuthUser(user.user_id) |
|
62 | 62 | auth_user.set_authenticated() |
|
63 | 63 | cs = auth_user.get_cookie_store() |
|
64 | 64 | session['rhodecode_user'] = cs |
|
65 | 65 | user.update_lastlogin() |
|
66 | 66 | Session().commit() |
|
67 | 67 | |
|
68 | 68 | # If they want to be remembered, update the cookie |
|
69 | 69 | if remember: |
|
70 | 70 | _year = (datetime.datetime.now() + |
|
71 | 71 | datetime.timedelta(seconds=60 * 60 * 24 * 365)) |
|
72 | 72 | session._set_cookie_expires(_year) |
|
73 | 73 | |
|
74 | 74 | session.save() |
|
75 | 75 | |
|
76 | 76 | safe_cs = cs.copy() |
|
77 | 77 | safe_cs['password'] = '****' |
|
78 | 78 | log.info('user %s is now authenticated and stored in ' |
|
79 | 79 | 'session, session attrs %s', username, safe_cs) |
|
80 | 80 | |
|
81 | 81 | # dumps session attrs back to cookie |
|
82 | 82 | session._update_cookie_out() |
|
83 | 83 | # we set new cookie |
|
84 | 84 | headers = None |
|
85 | 85 | if session.request['set_cookie']: |
|
86 | 86 | # send set-cookie headers back to response to update cookie |
|
87 | 87 | headers = [('Set-Cookie', session.request['cookie_out'])] |
|
88 | 88 | return headers |
|
89 | 89 | |
|
90 | 90 | |
|
91 | 91 | def get_came_from(request): |
|
92 | 92 | came_from = safe_str(request.GET.get('came_from', '')) |
|
93 | parsed = urlparse.urlparse(came_from) | |
|
93 | parsed = urllib.parse.urlparse(came_from) |
|
94 | 94 | allowed_schemes = ['http', 'https'] |
|
95 | 95 | default_came_from = h.route_path('home') |
|
96 | 96 | if parsed.scheme and parsed.scheme not in allowed_schemes: |
|
97 | 97 | log.error('Suspicious URL scheme detected %s for url %s', |
|
98 | 98 | parsed.scheme, parsed) |
|
99 | 99 | came_from = default_came_from |
|
100 | 100 | elif parsed.netloc and request.host != parsed.netloc: |
|
101 | 101 | log.error('Suspicious NETLOC detected %s for url %s server url ' |
|
102 | 102 | 'is: %s', parsed.netloc, parsed, request.host) |
|
103 | 103 | came_from = default_came_from |
|
104 | 104 | elif any(bad_str in parsed.path for bad_str in ('\r', '\n')): |
|
105 | 105 | log.error('Header injection detected `%s` for url %s server url ', |
|
106 | 106 | parsed.path, parsed) |
|
107 | 107 | came_from = default_came_from |
|
108 | 108 | |
|
109 | 109 | return came_from or default_came_from |
|
110 | 110 | |
|
111 | 111 | |
|
112 | 112 | class LoginView(BaseAppView): |
|
113 | 113 | |
|
114 | 114 | def load_default_context(self): |
|
115 | 115 | c = self._get_local_tmpl_context() |
|
116 | 116 | c.came_from = get_came_from(self.request) |
|
117 | 117 | return c |
|
118 | 118 | |
|
119 | 119 | def _get_captcha_data(self): |
|
120 | 120 | settings = SettingsModel().get_all_settings() |
|
121 | 121 | private_key = settings.get('rhodecode_captcha_private_key') |
|
122 | 122 | public_key = settings.get('rhodecode_captcha_public_key') |
|
123 | 123 | active = bool(private_key) |
|
124 | 124 | return CaptchaData( |
|
125 | 125 | active=active, private_key=private_key, public_key=public_key) |
|
126 | 126 | |
|
127 | 127 | def validate_captcha(self, private_key): |
|
128 | 128 | |
|
129 | 129 | captcha_rs = self.request.POST.get('g-recaptcha-response') |
|
130 | 130 | url = "https://www.google.com/recaptcha/api/siteverify" |
|
131 | 131 | params = { |
|
132 | 132 | 'secret': private_key, |
|
133 | 133 | 'response': captcha_rs, |
|
134 | 134 | 'remoteip': get_ip_addr(self.request.environ) |
|
135 | 135 | } |
|
136 | 136 | verify_rs = requests.get(url, params=params, verify=True, timeout=60) |
|
137 | 137 | verify_rs = verify_rs.json() |
|
138 | 138 | captcha_status = verify_rs.get('success', False) |
|
139 | 139 | captcha_errors = verify_rs.get('error-codes', []) |
|
140 | 140 | if not isinstance(captcha_errors, list): |
|
141 | 141 | captcha_errors = [captcha_errors] |
|
142 | 142 | captcha_errors = ', '.join(captcha_errors) |
|
143 | 143 | captcha_message = '' |
|
144 | 144 | if captcha_status is False: |
|
145 | 145 | captcha_message = "Bad captcha. Errors: {}".format( |
|
146 | 146 | captcha_errors) |
|
147 | 147 | |
|
148 | 148 | return captcha_status, captcha_message |
|
149 | 149 | |
|
150 | 150 | def login(self): |
|
151 | 151 | c = self.load_default_context() |
|
152 | 152 | auth_user = self._rhodecode_user |
|
153 | 153 | |
|
154 | 154 | # redirect if already logged in |
|
155 | 155 | if (auth_user.is_authenticated and |
|
156 | 156 | not auth_user.is_default and auth_user.ip_allowed): |
|
157 | 157 | raise HTTPFound(c.came_from) |
|
158 | 158 | |
|
159 | 159 | # check if we use headers plugin, and try to login using it. |
|
160 | 160 | try: |
|
161 | 161 | log.debug('Running PRE-AUTH for headers based authentication') |
|
162 | 162 | auth_info = authenticate( |
|
163 | 163 | '', '', self.request.environ, HTTP_TYPE, skip_missing=True) |
|
164 | 164 | if auth_info: |
|
165 | 165 | headers = store_user_in_session( |
|
166 | 166 | self.session, auth_info.get('username')) |
|
167 | 167 | raise HTTPFound(c.came_from, headers=headers) |
|
168 | 168 | except UserCreationError as e: |
|
169 | 169 | log.error(e) |
|
170 | 170 | h.flash(e, category='error') |
|
171 | 171 | |
|
172 | 172 | return self._get_template_context(c) |
|
173 | 173 | |
|
174 | 174 | def login_post(self): |
|
175 | 175 | c = self.load_default_context() |
|
176 | 176 | |
|
177 | 177 | login_form = LoginForm(self.request.translate)() |
|
178 | 178 | |
|
179 | 179 | try: |
|
180 | 180 | self.session.invalidate() |
|
181 | 181 | form_result = login_form.to_python(self.request.POST) |
|
182 | 182 | # form checks for username/password, now we're authenticated |
|
183 | 183 | headers = store_user_in_session( |
|
184 | 184 | self.session, |
|
185 | 185 | username=form_result['username'], |
|
186 | 186 | remember=form_result['remember']) |
|
187 | 187 | log.debug('Redirecting to "%s" after login.', c.came_from) |
|
188 | 188 | |
|
189 | 189 | audit_user = audit_logger.UserWrap( |
|
190 | 190 | username=self.request.POST.get('username'), |
|
191 | 191 | ip_addr=self.request.remote_addr) |
|
192 | 192 | action_data = {'user_agent': self.request.user_agent} |
|
193 | 193 | audit_logger.store_web( |
|
194 | 194 | 'user.login.success', action_data=action_data, |
|
195 | 195 | user=audit_user, commit=True) |
|
196 | 196 | |
|
197 | 197 | raise HTTPFound(c.came_from, headers=headers) |
|
198 | 198 | except formencode.Invalid as errors: |
|
199 | 199 | defaults = errors.value |
|
200 | 200 | # remove password from filling in form again |
|
201 | 201 | defaults.pop('password', None) |
|
202 | 202 | render_ctx = { |
|
203 | 203 | 'errors': errors.error_dict, |
|
204 | 204 | 'defaults': defaults, |
|
205 | 205 | } |
|
206 | 206 | |
|
207 | 207 | audit_user = audit_logger.UserWrap( |
|
208 | 208 | username=self.request.POST.get('username'), |
|
209 | 209 | ip_addr=self.request.remote_addr) |
|
210 | 210 | action_data = {'user_agent': self.request.user_agent} |
|
211 | 211 | audit_logger.store_web( |
|
212 | 212 | 'user.login.failure', action_data=action_data, |
|
213 | 213 | user=audit_user, commit=True) |
|
214 | 214 | return self._get_template_context(c, **render_ctx) |
|
215 | 215 | |
|
216 | 216 | except UserCreationError as e: |
|
217 | 217 | # headers auth or other auth functions that create users on |
|
218 | 218 | # the fly can throw this exception signaling that there's issue |
|
219 | 219 | # with user creation, explanation should be provided in |
|
220 | 220 | # Exception itself |
|
221 | 221 | h.flash(e, category='error') |
|
222 | 222 | return self._get_template_context(c) |
|
223 | 223 | |
|
224 | 224 | @CSRFRequired() |
|
225 | 225 | def logout(self): |
|
226 | 226 | auth_user = self._rhodecode_user |
|
227 | 227 | log.info('Deleting session for user: `%s`', auth_user) |
|
228 | 228 | |
|
229 | 229 | action_data = {'user_agent': self.request.user_agent} |
|
230 | 230 | audit_logger.store_web( |
|
231 | 231 | 'user.logout', action_data=action_data, |
|
232 | 232 | user=auth_user, commit=True) |
|
233 | 233 | self.session.delete() |
|
234 | 234 | return HTTPFound(h.route_path('home')) |
|
235 | 235 | |
|
236 | 236 | @HasPermissionAnyDecorator( |
|
237 | 237 | 'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate') |
|
238 | 238 | def register(self, defaults=None, errors=None): |
|
239 | 239 | c = self.load_default_context() |
|
240 | 240 | defaults = defaults or {} |
|
241 | 241 | errors = errors or {} |
|
242 | 242 | |
|
243 | 243 | settings = SettingsModel().get_all_settings() |
|
244 | 244 | register_message = settings.get('rhodecode_register_message') or '' |
|
245 | 245 | captcha = self._get_captcha_data() |
|
246 | 246 | auto_active = 'hg.register.auto_activate' in User.get_default_user()\ |
|
247 | 247 | .AuthUser().permissions['global'] |
|
248 | 248 | |
|
249 | 249 | render_ctx = self._get_template_context(c) |
|
250 | 250 | render_ctx.update({ |
|
251 | 251 | 'defaults': defaults, |
|
252 | 252 | 'errors': errors, |
|
253 | 253 | 'auto_active': auto_active, |
|
254 | 254 | 'captcha_active': captcha.active, |
|
255 | 255 | 'captcha_public_key': captcha.public_key, |
|
256 | 256 | 'register_message': register_message, |
|
257 | 257 | }) |
|
258 | 258 | return render_ctx |
|
259 | 259 | |
|
260 | 260 | @HasPermissionAnyDecorator( |
|
261 | 261 | 'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate') |
|
262 | 262 | def register_post(self): |
|
263 | 263 | from rhodecode.authentication.plugins import auth_rhodecode |
|
264 | 264 | |
|
265 | 265 | self.load_default_context() |
|
266 | 266 | captcha = self._get_captcha_data() |
|
267 | 267 | auto_active = 'hg.register.auto_activate' in User.get_default_user()\ |
|
268 | 268 | .AuthUser().permissions['global'] |
|
269 | 269 | |
|
270 | 270 | extern_name = auth_rhodecode.RhodeCodeAuthPlugin.uid |
|
271 | 271 | extern_type = auth_rhodecode.RhodeCodeAuthPlugin.uid |
|
272 | 272 | |
|
273 | 273 | register_form = RegisterForm(self.request.translate)() |
|
274 | 274 | try: |
|
275 | 275 | |
|
276 | 276 | form_result = register_form.to_python(self.request.POST) |
|
277 | 277 | form_result['active'] = auto_active |
|
278 | 278 | external_identity = self.request.POST.get('external_identity') |
|
279 | 279 | |
|
280 | 280 | if external_identity: |
|
281 | 281 | extern_name = external_identity |
|
282 | 282 | extern_type = external_identity |
|
283 | 283 | |
|
284 | 284 | if captcha.active: |
|
285 | 285 | captcha_status, captcha_message = self.validate_captcha( |
|
286 | 286 | captcha.private_key) |
|
287 | 287 | |
|
288 | 288 | if not captcha_status: |
|
289 | 289 | _value = form_result |
|
290 | 290 | _msg = _('Bad captcha') |
|
291 | 291 | error_dict = {'recaptcha_field': captcha_message} |
|
292 | 292 | raise formencode.Invalid( |
|
293 | 293 | _msg, _value, None, error_dict=error_dict) |
|
294 | 294 | |
|
295 | 295 | new_user = UserModel().create_registration( |
|
296 | 296 | form_result, extern_name=extern_name, extern_type=extern_type) |
|
297 | 297 | |
|
298 | 298 | action_data = {'data': new_user.get_api_data(), |
|
299 | 299 | 'user_agent': self.request.user_agent} |
|
300 | 300 | |
|
301 | 301 | if external_identity: |
|
302 | 302 | action_data['external_identity'] = external_identity |
|
303 | 303 | |
|
304 | 304 | audit_user = audit_logger.UserWrap( |
|
305 | 305 | username=new_user.username, |
|
306 | 306 | user_id=new_user.user_id, |
|
307 | 307 | ip_addr=self.request.remote_addr) |
|
308 | 308 | |
|
309 | 309 | audit_logger.store_web( |
|
310 | 310 | 'user.register', action_data=action_data, |
|
311 | 311 | user=audit_user) |
|
312 | 312 | |
|
313 | 313 | event = UserRegistered(user=new_user, session=self.session) |
|
314 | 314 | trigger(event) |
|
315 | 315 | h.flash( |
|
316 | 316 | _('You have successfully registered with RhodeCode. You can log-in now.'), |
|
317 | 317 | category='success') |
|
318 | 318 | if external_identity: |
|
319 | 319 | h.flash( |
|
320 | 320 | _('Please use the {identity} button to log-in').format( |
|
321 | 321 | identity=external_identity), |
|
322 | 322 | category='success') |
|
323 | 323 | Session().commit() |
|
324 | 324 | |
|
325 | 325 | redirect_ro = self.request.route_path('login') |
|
326 | 326 | raise HTTPFound(redirect_ro) |
|
327 | 327 | |
|
328 | 328 | except formencode.Invalid as errors: |
|
329 | 329 | errors.value.pop('password', None) |
|
330 | 330 | errors.value.pop('password_confirmation', None) |
|
331 | 331 | return self.register( |
|
332 | 332 | defaults=errors.value, errors=errors.error_dict) |
|
333 | 333 | |
|
334 | 334 | except UserCreationError as e: |
|
335 | 335 | # container auth or other auth functions that create users on |
|
336 | 336 | # the fly can throw this exception signaling that there's issue |
|
337 | 337 | # with user creation, explanation should be provided in |
|
338 | 338 | # Exception itself |
|
339 | 339 | h.flash(e, category='error') |
|
340 | 340 | return self.register() |
|
341 | 341 | |
|
342 | 342 | def password_reset(self): |
|
343 | 343 | c = self.load_default_context() |
|
344 | 344 | captcha = self._get_captcha_data() |
|
345 | 345 | |
|
346 | 346 | template_context = { |
|
347 | 347 | 'captcha_active': captcha.active, |
|
348 | 348 | 'captcha_public_key': captcha.public_key, |
|
349 | 349 | 'defaults': {}, |
|
350 | 350 | 'errors': {}, |
|
351 | 351 | } |
|
352 | 352 | |
|
353 | 353 | # always send implicit message to prevent from discovery of |
|
354 | 354 | # matching emails |
|
355 | 355 | msg = _('If such email exists, a password reset link was sent to it.') |
|
356 | 356 | |
|
357 | 357 | def default_response(): |
|
358 | 358 | log.debug('faking response on invalid password reset') |
|
359 | 359 | # make this take 2s, to prevent brute forcing. |
|
360 | 360 | time.sleep(2) |
|
361 | 361 | h.flash(msg, category='success') |
|
362 | 362 | return HTTPFound(self.request.route_path('reset_password')) |
|
363 | 363 | |
|
364 | 364 | if self.request.POST: |
|
365 | 365 | if h.HasPermissionAny('hg.password_reset.disabled')(): |
|
366 | 366 | _email = self.request.POST.get('email', '') |
|
367 | 367 | log.error('Failed attempt to reset password for `%s`.', _email) |
|
368 | 368 | h.flash(_('Password reset has been disabled.'), category='error') |
|
369 | 369 | return HTTPFound(self.request.route_path('reset_password')) |
|
370 | 370 | |
|
371 | 371 | password_reset_form = PasswordResetForm(self.request.translate)() |
|
372 | 372 | description = u'Generated token for password reset from {}'.format( |
|
373 | 373 | datetime.datetime.now().isoformat()) |
|
374 | 374 | |
|
375 | 375 | try: |
|
376 | 376 | form_result = password_reset_form.to_python( |
|
377 | 377 | self.request.POST) |
|
378 | 378 | user_email = form_result['email'] |
|
379 | 379 | |
|
380 | 380 | if captcha.active: |
|
381 | 381 | captcha_status, captcha_message = self.validate_captcha( |
|
382 | 382 | captcha.private_key) |
|
383 | 383 | |
|
384 | 384 | if not captcha_status: |
|
385 | 385 | _value = form_result |
|
386 | 386 | _msg = _('Bad captcha') |
|
387 | 387 | error_dict = {'recaptcha_field': captcha_message} |
|
388 | 388 | raise formencode.Invalid( |
|
389 | 389 | _msg, _value, None, error_dict=error_dict) |
|
390 | 390 | |
|
391 | 391 | # Generate reset URL and send mail. |
|
392 | 392 | user = User.get_by_email(user_email) |
|
393 | 393 | |
|
394 | 394 | # only allow rhodecode based users to reset their password |
|
395 | 395 | # external auth shouldn't allow password reset |
|
396 | 396 | if user and user.extern_type != auth_rhodecode.RhodeCodeAuthPlugin.uid: |
|
397 | 397 | log.warning('User %s with external type `%s` tried a password reset. ' |
|
398 | 398 | 'This try was rejected', user, user.extern_type) |
|
399 | 399 | return default_response() |
|
400 | 400 | |
|
401 | 401 | # generate password reset token that expires in 10 minutes |
|
402 | 402 | reset_token = UserModel().add_auth_token( |
|
403 | 403 | user=user, lifetime_minutes=10, |
|
404 | 404 | role=UserModel.auth_token_role.ROLE_PASSWORD_RESET, |
|
405 | 405 | description=description) |
|
406 | 406 | Session().commit() |
|
407 | 407 | |
|
408 | 408 | log.debug('Successfully created password recovery token') |
|
409 | 409 | password_reset_url = self.request.route_url( |
|
410 | 410 | 'reset_password_confirmation', |
|
411 | 411 | _query={'key': reset_token.api_key}) |
|
412 | 412 | UserModel().reset_password_link( |
|
413 | 413 | form_result, password_reset_url) |
|
414 | 414 | |
|
415 | 415 | action_data = {'email': user_email, |
|
416 | 416 | 'user_agent': self.request.user_agent} |
|
417 | 417 | audit_logger.store_web( |
|
418 | 418 | 'user.password.reset_request', action_data=action_data, |
|
419 | 419 | user=self._rhodecode_user, commit=True) |
|
420 | 420 | |
|
421 | 421 | return default_response() |
|
422 | 422 | |
|
423 | 423 | except formencode.Invalid as errors: |
|
424 | 424 | template_context.update({ |
|
425 | 425 | 'defaults': errors.value, |
|
426 | 426 | 'errors': errors.error_dict, |
|
427 | 427 | }) |
|
428 | 428 | if not self.request.POST.get('email'): |
|
429 | 429 | # case of empty email, we want to report that |
|
430 | 430 | return self._get_template_context(c, **template_context) |
|
431 | 431 | |
|
432 | 432 | if 'recaptcha_field' in errors.error_dict: |
|
433 | 433 | # case of failed captcha |
|
434 | 434 | return self._get_template_context(c, **template_context) |
|
435 | 435 | |
|
436 | 436 | return default_response() |
|
437 | 437 | |
|
438 | 438 | return self._get_template_context(c, **template_context) |
|
439 | 439 | |
|
440 | 440 | def password_reset_confirmation(self): |
|
441 | 441 | self.load_default_context() |
|
442 | 442 | if self.request.GET and self.request.GET.get('key'): |
|
443 | 443 | # make this take 2s, to prevent brute forcing. |
|
444 | 444 | time.sleep(2) |
|
445 | 445 | |
|
446 | 446 | token = AuthTokenModel().get_auth_token( |
|
447 | 447 | self.request.GET.get('key')) |
|
448 | 448 | |
|
449 | 449 | # verify token is the correct role |
|
450 | 450 | if token is None or token.role != UserApiKeys.ROLE_PASSWORD_RESET: |
|
451 | 451 | log.debug('Got token with role:%s expected is %s', |
|
452 | 452 | getattr(token, 'role', 'EMPTY_TOKEN'), |
|
453 | 453 | UserApiKeys.ROLE_PASSWORD_RESET) |
|
454 | 454 | h.flash( |
|
455 | 455 | _('Given reset token is invalid'), category='error') |
|
456 | 456 | return HTTPFound(self.request.route_path('reset_password')) |
|
457 | 457 | |
|
458 | 458 | try: |
|
459 | 459 | owner = token.user |
|
460 | 460 | data = {'email': owner.email, 'token': token.api_key} |
|
461 | 461 | UserModel().reset_password(data) |
|
462 | 462 | h.flash( |
|
463 | 463 | _('Your password reset was successful, ' |
|
464 | 464 | 'a new password has been sent to your email'), |
|
465 | 465 | category='success') |
|
466 | 466 | except Exception as e: |
|
467 | 467 | log.error(e) |
|
468 | 468 | return HTTPFound(self.request.route_path('reset_password')) |
|
469 | 469 | |
|
470 | 470 | return HTTPFound(self.request.route_path('login')) |
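Reviewer note on `get_came_from()` above: it parses the `came_from` query parameter with `urllib.parse.urlparse` and falls back to the home route whenever the scheme, host, or path looks unsafe. A condensed sketch of that guard (simplified names, assumes Python 3; not the actual view code):

```python
import urllib.parse

def is_safe_came_from(came_from, request_host):
    """Mirror of the checks in get_came_from(): reject anything that could
    redirect off-site or inject response headers."""
    parsed = urllib.parse.urlparse(came_from)
    if parsed.scheme and parsed.scheme not in ('http', 'https'):
        return False                      # data:, mailto:, file:, ftp: ...
    if parsed.netloc and parsed.netloc != request_host:
        return False                      # redirect to a foreign host
    if '\r' in parsed.path or '\n' in parsed.path:
        return False                      # header-injection attempt
    return True

assert is_safe_came_from('/_admin/users?branch=stable', 'code.example.com')
assert not is_safe_came_from('http://other.domain', 'code.example.com')
```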
@@ -1,258 +1,258 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import re |
|
23 | 23 | import sys |
|
24 | 24 | import logging |
|
25 | 25 | import signal |
|
26 | 26 | import tempfile |
|
27 | 27 | from subprocess import Popen, PIPE |
|
28 | import urlparse | |
|
28 | import urllib.parse | |
|
29 | 29 | |
|
30 | 30 | from .base import VcsServer |
|
31 | 31 | |
|
32 | 32 | log = logging.getLogger(__name__) |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | class SubversionTunnelWrapper(object): |
|
36 | 36 | process = None |
|
37 | 37 | |
|
38 | 38 | def __init__(self, server): |
|
39 | 39 | self.server = server |
|
40 | 40 | self.timeout = 30 |
|
41 | 41 | self.stdin = sys.stdin |
|
42 | 42 | self.stdout = sys.stdout |
|
43 | 43 | self.svn_conf_fd, self.svn_conf_path = tempfile.mkstemp() |
|
44 | 44 | self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp() |
|
45 | 45 | |
|
46 | 46 | self.read_only = True # flag that we set to make the hooks readonly |
|
47 | 47 | |
|
48 | 48 | def create_svn_config(self): |
|
49 | 49 | content = ( |
|
50 | 50 | '[general]\n' |
|
51 | 51 | 'hooks-env = {}\n').format(self.hooks_env_path) |
|
52 | 52 | with os.fdopen(self.svn_conf_fd, 'w') as config_file: |
|
53 | 53 | config_file.write(content) |
|
54 | 54 | |
|
55 | 55 | def create_hooks_env(self): |
|
56 | 56 | content = ( |
|
57 | 57 | '[default]\n' |
|
58 | 58 | 'LANG = en_US.UTF-8\n') |
|
59 | 59 | if self.read_only: |
|
60 | 60 | content += 'SSH_READ_ONLY = 1\n' |
|
61 | 61 | with os.fdopen(self.hooks_env_fd, 'w') as hooks_env_file: |
|
62 | 62 | hooks_env_file.write(content) |
|
63 | 63 | |
|
64 | 64 | def remove_configs(self): |
|
65 | 65 | os.remove(self.svn_conf_path) |
|
66 | 66 | os.remove(self.hooks_env_path) |
|
67 | 67 | |
|
68 | 68 | def command(self): |
|
69 | 69 | root = self.server.get_root_store() |
|
70 | 70 | username = self.server.user.username |
|
71 | 71 | |
|
72 | 72 | command = [ |
|
73 | 73 | self.server.svn_path, '-t', |
|
74 | 74 | '--config-file', self.svn_conf_path, |
|
75 | 75 | '--tunnel-user', username, |
|
76 | 76 | '-r', root] |
|
77 | 77 | log.debug("Final CMD: %s", ' '.join(command)) |
|
78 | 78 | return command |
|
79 | 79 | |
|
80 | 80 | def start(self): |
|
81 | 81 | command = self.command() |
|
82 | 82 | self.process = Popen(' '.join(command), stdin=PIPE, shell=True) |
|
83 | 83 | |
|
84 | 84 | def sync(self): |
|
85 | 85 | while self.process.poll() is None: |
|
86 | 86 | next_byte = self.stdin.read(1) |
|
87 | 87 | if not next_byte: |
|
88 | 88 | break |
|
89 | 89 | self.process.stdin.write(next_byte) |
|
90 | 90 | self.remove_configs() |
|
91 | 91 | |
|
92 | 92 | @property |
|
93 | 93 | def return_code(self): |
|
94 | 94 | return self.process.returncode |
|
95 | 95 | |
|
96 | 96 | def get_first_client_response(self): |
|
97 | 97 | signal.signal(signal.SIGALRM, self.interrupt) |
|
98 | 98 | signal.alarm(self.timeout) |
|
99 | 99 | first_response = self._read_first_client_response() |
|
100 | 100 | signal.alarm(0) |
|
101 | 101 | return (self._parse_first_client_response(first_response) |
|
102 | 102 | if first_response else None) |
|
103 | 103 | |
|
104 | 104 | def patch_first_client_response(self, response, **kwargs): |
|
105 | 105 | self.create_hooks_env() |
|
106 | 106 | data = response.copy() |
|
107 | 107 | data.update(kwargs) |
|
108 | 108 | data['url'] = self._svn_string(data['url']) |
|
109 | 109 | data['ra_client'] = self._svn_string(data['ra_client']) |
|
110 | 110 | data['client'] = data['client'] or '' |
|
111 | 111 | buffer_ = ( |
|
112 | 112 | "( {version} ( {capabilities} ) {url}{ra_client}" |
|
113 | 113 | "( {client}) ) ".format(**data)) |
|
114 | 114 | self.process.stdin.write(buffer_) |
|
115 | 115 | |
|
116 | 116 | def fail(self, message): |
|
117 | 117 | print("( failure ( ( 210005 {message} 0: 0 ) ) )".format( |
|
118 | 118 | message=self._svn_string(message))) |
|
119 | 119 | self.remove_configs() |
|
120 | 120 | self.process.kill() |
|
121 | 121 | return 1 |
|
122 | 122 | |
|
123 | 123 | def interrupt(self, signum, frame): |
|
124 | 124 | self.fail("Exited by timeout") |
|
125 | 125 | |
|
126 | 126 | def _svn_string(self, str_): |
|
127 | 127 | if not str_: |
|
128 | 128 | return '' |
|
129 | 129 | return '{length}:{string} '.format(length=len(str_), string=str_) |
|
130 | 130 | |
|
131 | 131 | def _read_first_client_response(self): |
|
132 | 132 | buffer_ = "" |
|
133 | 133 | brackets_stack = [] |
|
134 | 134 | while True: |
|
135 | 135 | next_byte = self.stdin.read(1) |
|
136 | 136 | buffer_ += next_byte |
|
137 | 137 | if next_byte == "(": |
|
138 | 138 | brackets_stack.append(next_byte) |
|
139 | 139 | elif next_byte == ")": |
|
140 | 140 | brackets_stack.pop() |
|
141 | 141 | elif next_byte == " " and not brackets_stack: |
|
142 | 142 | break |
|
143 | 143 | |
|
144 | 144 | return buffer_ |
|
145 | 145 | |
|
146 | 146 | def _parse_first_client_response(self, buffer_): |
|
147 | 147 | """ |
|
148 | 148 | According to the Subversion RA protocol, the first request |
|
149 | 149 | should look like: |
|
150 | 150 | |
|
151 | 151 | ( version:number ( cap:word ... ) url:string ? ra-client:string |
|
152 | 152 | ( ? client:string ) ) |
|
153 | 153 | |
|
154 | 154 | Please check https://svn.apache.org/repos/asf/subversion/trunk/subversion/libsvn_ra_svn/protocol |
|
155 | 155 | """ |
|
156 | 156 | version_re = r'(?P<version>\d+)' |
|
157 | 157 | capabilities_re = r'\(\s(?P<capabilities>[\w\d\-\ ]+)\s\)' |
|
158 | 158 | url_re = r'\d+\:(?P<url>[\W\w]+)' |
|
159 | 159 | ra_client_re = r'(\d+\:(?P<ra_client>[\W\w]+)\s)' |
|
160 | 160 | client_re = r'(\d+\:(?P<client>[\W\w]+)\s)*' |
|
161 | 161 | regex = re.compile( |
|
162 | 162 | r'^\(\s{version}\s{capabilities}\s{url}\s{ra_client}' |
|
163 | 163 | r'\(\s{client}\)\s\)\s*$'.format( |
|
164 | 164 | version=version_re, capabilities=capabilities_re, |
|
165 | 165 | url=url_re, ra_client=ra_client_re, client=client_re)) |
|
166 | 166 | matcher = regex.match(buffer_) |
|
167 | 167 | |
|
168 | 168 | return matcher.groupdict() if matcher else None |
|
169 | 169 | |
|
170 | 170 | def _match_repo_name(self, url): |
|
171 | 171 | """ |
|
172 | 172 | Given an server url, try to match it against ALL known repository names. |
|
173 | 173 | This handles a tricky SVN case for SSH and subdir commits. |
|
174 | 174 | E.g if our repo name is my-svn-repo, a svn commit on file in a subdir would |
|
175 | 175 | result in the url with this subdir added. |
|
176 | 176 | """ |
|
177 | 177 | # case 1 direct match, we don't do any "heavy" lookups |
|
178 | 178 | if url in self.server.user_permissions: |
|
179 | 179 | return url |
|
180 | 180 | |
|
181 | 181 | log.debug('Extracting repository name from subdir path %s', url) |
|
182 | 182 | # case 2 we check all permissions, and match closes possible case... |
|
183 | 183 | # NOTE(dan): In this case we only know that url has a subdir parts, it's safe |
|
184 | 184 | # to assume that it will have the repo name as prefix, we ensure the prefix |
|
185 | 185 | # for similar repositories isn't matched by adding a / |
|
186 | 186 | # e.g subgroup/repo-name/ and subgroup/repo-name-1/ would work correct. |
|
187 | 187 | for repo_name in self.server.user_permissions: |
|
188 | 188 | repo_name_prefix = repo_name + '/' |
|
189 | 189 | if url.startswith(repo_name_prefix): |
|
190 | 190 | log.debug('Found prefix %s match, returning proper repository name', |
|
191 | 191 | repo_name_prefix) |
|
192 | 192 | return repo_name |
|
193 | 193 | |
|
194 | 194 | return |
|
195 | 195 | |
|
196 | 196 | def run(self, extras): |
|
197 | 197 | action = 'pull' |
|
198 | 198 | self.create_svn_config() |
|
199 | 199 | self.start() |
|
200 | 200 | |
|
201 | 201 | first_response = self.get_first_client_response() |
|
202 | 202 | if not first_response: |
|
203 | 203 | return self.fail("Repository name cannot be extracted") |
|
204 | 204 | |
|
205 | url_parts = urlparse.urlparse(first_response['url']) | |
|
205 | url_parts = urllib.parse.urlparse(first_response['url']) |
|
206 | 206 | |
|
207 | 207 | self.server.repo_name = self._match_repo_name(url_parts.path.strip('/')) |
|
208 | 208 | |
|
209 | 209 | exit_code = self.server._check_permissions(action) |
|
210 | 210 | if exit_code: |
|
211 | 211 | return exit_code |
|
212 | 212 | |
|
213 | 213 | # set the readonly flag to False if we have proper permissions |
|
214 | 214 | if self.server.has_write_perm(): |
|
215 | 215 | self.read_only = False |
|
216 | 216 | self.server.update_environment(action=action, extras=extras) |
|
217 | 217 | |
|
218 | 218 | self.patch_first_client_response(first_response) |
|
219 | 219 | self.sync() |
|
220 | 220 | return self.return_code |
|
221 | 221 | |
|
222 | 222 | |
|
223 | 223 | class SubversionServer(VcsServer): |
|
224 | 224 | backend = 'svn' |
|
225 | 225 | repo_user_agent = 'svn' |
|
226 | 226 | |
|
227 | 227 | def __init__(self, store, ini_path, repo_name, |
|
228 | 228 | user, user_permissions, config, env): |
|
229 | 229 | super(SubversionServer, self)\ |
|
230 | 230 | .__init__(user, user_permissions, config, env) |
|
231 | 231 | self.store = store |
|
232 | 232 | self.ini_path = ini_path |
|
233 | 233 | # NOTE(dan): repo_name at this point is empty, |
|
234 | 234 | # this is set later in .run() based on the parsed input stream |
|
235 | 235 | self.repo_name = repo_name |
|
236 | 236 | self._path = self.svn_path = config.get('app:main', 'ssh.executable.svn') |
|
237 | 237 | |
|
238 | 238 | self.tunnel = SubversionTunnelWrapper(server=self) |
|
239 | 239 | |
|
240 | 240 | def _handle_tunnel(self, extras): |
|
241 | 241 | |
|
242 | 242 | # pre-auth |
|
243 | 243 | action = 'pull' |
|
244 | 244 | # Special case for SVN, we extract the repo name at a later stage |
|
245 | 245 | # exit_code = self._check_permissions(action) |
|
246 | 246 | # if exit_code: |
|
247 | 247 | # return exit_code, False |
|
248 | 248 | |
|
249 | 249 | req = self.env['request'] |
|
250 | 250 | server_url = req.host_url + req.script_name |
|
251 | 251 | extras['server_url'] = server_url |
|
252 | 252 | |
|
253 | 253 | log.debug('Using %s binaries from path %s', self.backend, self._path) |
|
254 | 254 | exit_code = self.tunnel.run(extras) |
|
255 | 255 | |
|
256 | 256 | return exit_code, action == "push" |
|
257 | 257 | |
|
258 | 258 |
@@ -1,580 +1,580 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | Renderer for markup languages with ability to parse using rst or markdown |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import re |
|
27 | 27 | import os |
|
28 | 28 | import lxml |
|
29 | 29 | import logging |
|
30 | import urlparse | |
|
30 | import urllib.parse | |
|
31 | 31 | import bleach |
|
32 | 32 | |
|
33 | 33 | from mako.lookup import TemplateLookup |
|
34 | 34 | from mako.template import Template as MakoTemplate |
|
35 | 35 | |
|
36 | 36 | from docutils.core import publish_parts |
|
37 | 37 | from docutils.parsers.rst import directives |
|
38 | 38 | from docutils import writers |
|
39 | 39 | from docutils.writers import html4css1 |
|
40 | 40 | import markdown |
|
41 | 41 | |
|
42 | 42 | from rhodecode.lib.markdown_ext import GithubFlavoredMarkdownExtension |
|
43 | 43 | from rhodecode.lib.utils2 import (safe_unicode, md5_safe, MENTIONS_REGEX) |
|
44 | 44 | |
|
45 | 45 | log = logging.getLogger(__name__) |
|
46 | 46 | |
|
47 | 47 | # default renderer used to generate automated comments |
|
48 | 48 | DEFAULT_COMMENTS_RENDERER = 'rst' |
|
49 | 49 | |
|
50 | 50 | try: |
|
51 | 51 | from lxml.html import fromstring |
|
52 | 52 | from lxml.html import tostring |
|
53 | 53 | except ImportError: |
|
54 | 54 | log.exception('Failed to import lxml') |
|
55 | 55 | fromstring = None |
|
56 | 56 | tostring = None |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | class CustomHTMLTranslator(writers.html4css1.HTMLTranslator): |
|
60 | 60 | """ |
|
61 | 61 | Custom HTML Translator used for sandboxing potential |
|
62 | 62 | JS injections in ref links |
|
63 | 63 | """ |
|
64 | 64 | def visit_literal_block(self, node): |
|
65 | 65 | self.body.append(self.starttag(node, 'pre', CLASS='codehilite literal-block')) |
|
66 | 66 | |
|
67 | 67 | def visit_reference(self, node): |
|
68 | 68 | if 'refuri' in node.attributes: |
|
69 | 69 | refuri = node['refuri'] |
|
70 | 70 | if ':' in refuri: |
|
71 | 71 | prefix, link = refuri.lstrip().split(':', 1) |
|
72 | 72 | prefix = prefix or '' |
|
73 | 73 | |
|
74 | 74 | if prefix.lower() == 'javascript': |
|
75 | 75 | # we don't allow javascript type of refs... |
|
76 | 76 | node['refuri'] = 'javascript:alert("SandBoxedJavascript")' |
|
77 | 77 | |
|
78 | 78 | # old style class requires this... |
|
79 | 79 | return html4css1.HTMLTranslator.visit_reference(self, node) |
|
80 | 80 | |
|
81 | 81 | |
|
82 | 82 | class RhodeCodeWriter(writers.html4css1.Writer): |
|
83 | 83 | def __init__(self): |
|
84 | 84 | writers.Writer.__init__(self) |
|
85 | 85 | self.translator_class = CustomHTMLTranslator |
|
86 | 86 | |
|
87 | 87 | |
|
88 | 88 | def relative_links(html_source, server_paths): |
|
89 | 89 | if not html_source: |
|
90 | 90 | return html_source |
|
91 | 91 | |
|
92 | 92 | if not fromstring or not tostring: |
|
93 | 93 | return html_source |
|
94 | 94 | |
|
95 | 95 | try: |
|
96 | 96 | doc = lxml.html.fromstring(html_source) |
|
97 | 97 | except Exception: |
|
98 | 98 | return html_source |
|
99 | 99 | |
|
100 | 100 | for el in doc.cssselect('img, video'): |
|
101 | 101 | src = el.attrib.get('src') |
|
102 | 102 | if src: |
|
103 | 103 | el.attrib['src'] = relative_path(src, server_paths['raw']) |
|
104 | 104 | |
|
105 | 105 | for el in doc.cssselect('a:not(.gfm)'): |
|
106 | 106 | src = el.attrib.get('href') |
|
107 | 107 | if src: |
|
108 | 108 | raw_mode = el.attrib['href'].endswith('?raw=1') |
|
109 | 109 | if raw_mode: |
|
110 | 110 | el.attrib['href'] = relative_path(src, server_paths['raw']) |
|
111 | 111 | else: |
|
112 | 112 | el.attrib['href'] = relative_path(src, server_paths['standard']) |
|
113 | 113 | |
|
114 | 114 | return lxml.html.tostring(doc) |
|
115 | 115 | |
|
116 | 116 | |
|
117 | 117 | def relative_path(path, request_path, is_repo_file=None): |
|
118 | 118 | """ |
|
119 | 119 | relative link support, path is a rel path, and request_path is current |
|
120 | 120 | server path (not absolute) |
|
121 | 121 | |
|
122 | 122 | e.g. |
|
123 | 123 | |
|
124 | 124 | path = '../logo.png' |
|
125 | 125 | request_path= '/repo/files/path/file.md' |
|
126 | 126 | produces: '/repo/files/logo.png' |
|
127 | 127 | """ |
|
128 | 128 | # TODO(marcink): unicode/str support ? |
|
129 | 129 | # maybe=> safe_unicode(urllib.quote(safe_str(final_path), '/:')) |
|
130 | 130 | |
|
131 | 131 | def dummy_check(p): |
|
132 | 132 | return True # assume default is a valid file path |
|
133 | 133 | |
|
134 | 134 | is_repo_file = is_repo_file or dummy_check |
|
135 | 135 | if not path: |
|
136 | 136 | return request_path |
|
137 | 137 | |
|
138 | 138 | path = safe_unicode(path) |
|
139 | 139 | request_path = safe_unicode(request_path) |
|
140 | 140 | |
|
141 | 141 | if path.startswith((u'data:', u'javascript:', u'#', u':')): |
|
142 | 142 | # skip data, anchor, invalid links |
|
143 | 143 | return path |
|
144 | 144 | |
|
145 | is_absolute = bool(urlparse.urlparse(path).netloc) | |
|
145 | is_absolute = bool(urllib.parse.urlparse(path).netloc) |
|
146 | 146 | if is_absolute: |
|
147 | 147 | return path |
|
148 | 148 | |
|
149 | 149 | if not request_path: |
|
150 | 150 | return path |
|
151 | 151 | |
|
152 | 152 | if path.startswith(u'/'): |
|
153 | 153 | path = path[1:] |
|
154 | 154 | |
|
155 | 155 | if path.startswith(u'./'): |
|
156 | 156 | path = path[2:] |
|
157 | 157 | |
|
158 | 158 | parts = request_path.split('/') |
|
159 | 159 | # compute how deep we need to traverse the request_path |
|
160 | 160 | depth = 0 |
|
161 | 161 | |
|
162 | 162 | if is_repo_file(request_path): |
|
163 | 163 | # if request path is a VALID file, we use a relative path with |
|
164 | 164 | # one level up |
|
165 | 165 | depth += 1 |
|
166 | 166 | |
|
167 | 167 | while path.startswith(u'../'): |
|
168 | 168 | depth += 1 |
|
169 | 169 | path = path[3:] |
|
170 | 170 | |
|
171 | 171 | if depth > 0: |
|
172 | 172 | parts = parts[:-depth] |
|
173 | 173 | |
|
174 | 174 | parts.append(path) |
|
175 | 175 | final_path = u'/'.join(parts).lstrip(u'/') |
|
176 | 176 | |
|
177 | 177 | return u'/' + final_path |
|
178 | 178 | |
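
Illustrative sketch (not part of the changeset): a rough posixpath equivalent of the docstring example above, assuming request_path points at a real repo file; this is not the actual implementation, just a sanity check of the documented result.

import posixpath

def rel(path, request_path):
    base = posixpath.dirname(request_path)               # '/repo/files/path'
    return posixpath.normpath(posixpath.join(base, path))

print(rel('../logo.png', '/repo/files/path/file.md'))    # /repo/files/logo.png
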
|
179 | 179 | |
|
180 | 180 | _cached_markdown_renderer = None |
|
181 | 181 | |
|
182 | 182 | |
|
183 | 183 | def get_markdown_renderer(extensions, output_format): |
|
184 | 184 | global _cached_markdown_renderer |
|
185 | 185 | |
|
186 | 186 | if _cached_markdown_renderer is None: |
|
187 | 187 | _cached_markdown_renderer = markdown.Markdown( |
|
188 | 188 | extensions=extensions, |
|
189 | 189 | enable_attributes=False, output_format=output_format) |
|
190 | 190 | return _cached_markdown_renderer |
|
191 | 191 | |
|
192 | 192 | |
|
193 | 193 | _cached_markdown_renderer_flavored = None |
|
194 | 194 | |
|
195 | 195 | |
|
196 | 196 | def get_markdown_renderer_flavored(extensions, output_format): |
|
197 | 197 | global _cached_markdown_renderer_flavored |
|
198 | 198 | |
|
199 | 199 | if _cached_markdown_renderer_flavored is None: |
|
200 | 200 | _cached_markdown_renderer_flavored = markdown.Markdown( |
|
201 | 201 | extensions=extensions + [GithubFlavoredMarkdownExtension()], |
|
202 | 202 | enable_attributes=False, output_format=output_format) |
|
203 | 203 | return _cached_markdown_renderer_flavored |
|
204 | 204 | |
|
205 | 205 | |
|
206 | 206 | class MarkupRenderer(object): |
|
207 | 207 | RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES = ['include', 'meta', 'raw'] |
|
208 | 208 | |
|
209 | 209 | MARKDOWN_PAT = re.compile(r'\.(md|mkdn?|mdown|markdown)$', re.IGNORECASE) |
|
210 | 210 | RST_PAT = re.compile(r'\.re?st$', re.IGNORECASE) |
|
211 | 211 | JUPYTER_PAT = re.compile(r'\.(ipynb)$', re.IGNORECASE) |
|
212 | 212 | PLAIN_PAT = re.compile(r'^readme$', re.IGNORECASE) |
|
213 | 213 | |
|
214 | 214 | URL_PAT = re.compile(r'(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]' |
|
215 | 215 | r'|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)') |
|
216 | 216 | |
|
217 | 217 | MENTION_PAT = re.compile(MENTIONS_REGEX) |
|
218 | 218 | |
|
219 | 219 | extensions = ['markdown.extensions.codehilite', 'markdown.extensions.extra', |
|
220 | 220 | 'markdown.extensions.def_list', 'markdown.extensions.sane_lists'] |
|
221 | 221 | |
|
222 | 222 | output_format = 'html4' |
|
223 | 223 | |
|
224 | 224 | # extension together with weights. Lower is first means we control how |
|
225 | 225 | # extensions are attached to readme names with those. |
|
226 | 226 | PLAIN_EXTS = [ |
|
227 | 227 | # prefer no extension |
|
228 | 228 | ('', 0), # special case that renders READMES names without extension |
|
229 | 229 | ('.text', 2), ('.TEXT', 2), |
|
230 | 230 | ('.txt', 3), ('.TXT', 3) |
|
231 | 231 | ] |
|
232 | 232 | |
|
233 | 233 | RST_EXTS = [ |
|
234 | 234 | ('.rst', 1), ('.rest', 1), |
|
235 | 235 | ('.RST', 2), ('.REST', 2) |
|
236 | 236 | ] |
|
237 | 237 | |
|
238 | 238 | MARKDOWN_EXTS = [ |
|
239 | 239 | ('.md', 1), ('.MD', 1), |
|
240 | 240 | ('.mkdn', 2), ('.MKDN', 2), |
|
241 | 241 | ('.mdown', 3), ('.MDOWN', 3), |
|
242 | 242 | ('.markdown', 4), ('.MARKDOWN', 4) |
|
243 | 243 | ] |
|
244 | 244 | |
|
245 | 245 | def _detect_renderer(self, source, filename=None): |
|
246 | 246 | """ |
|
247 | 247 | runs detection of what renderer should be used for generating html |
|
248 | 248 | from a markup language |
|
249 | 249 | |
|
250 | 250 | filename can be also explicitly a renderer name |
|
251 | 251 | |
|
252 | 252 | :param source: |
|
253 | 253 | :param filename: |
|
254 | 254 | """ |
|
255 | 255 | |
|
256 | 256 | if MarkupRenderer.MARKDOWN_PAT.findall(filename): |
|
257 | 257 | detected_renderer = 'markdown' |
|
258 | 258 | elif MarkupRenderer.RST_PAT.findall(filename): |
|
259 | 259 | detected_renderer = 'rst' |
|
260 | 260 | elif MarkupRenderer.JUPYTER_PAT.findall(filename): |
|
261 | 261 | detected_renderer = 'jupyter' |
|
262 | 262 | elif MarkupRenderer.PLAIN_PAT.findall(filename): |
|
263 | 263 | detected_renderer = 'plain' |
|
264 | 264 | else: |
|
265 | 265 | detected_renderer = 'plain' |
|
266 | 266 | |
|
267 | 267 | return getattr(MarkupRenderer, detected_renderer) |
|
268 | 268 | |
|
269 | 269 | @classmethod |
|
270 | 270 | def bleach_clean(cls, text): |
|
271 | 271 | from .bleach_whitelist import markdown_attrs, markdown_tags |
|
272 | 272 | allowed_tags = markdown_tags |
|
273 | 273 | allowed_attrs = markdown_attrs |
|
274 | 274 | |
|
275 | 275 | try: |
|
276 | 276 | return bleach.clean(text, tags=allowed_tags, attributes=allowed_attrs) |
|
277 | 277 | except Exception: |
|
278 | 278 | return 'UNPARSEABLE TEXT' |
|
279 | 279 | |
|
280 | 280 | @classmethod |
|
281 | 281 | def renderer_from_filename(cls, filename, exclude): |
|
282 | 282 | """ |
|
283 | 283 | Detect renderer markdown/rst from filename and optionally use exclude |
|
284 | 284 | list to remove some options. This is mostly used in helpers. |
|
285 | 285 | Returns None when no renderer can be detected. |
|
286 | 286 | """ |
|
287 | 287 | def _filter(elements): |
|
288 | 288 | if isinstance(exclude, (list, tuple)): |
|
289 | 289 | return [x for x in elements if x not in exclude] |
|
290 | 290 | return elements |
|
291 | 291 | |
|
292 | 292 | if filename.endswith( |
|
293 | 293 | tuple(_filter([x[0] for x in cls.MARKDOWN_EXTS if x[0]]))): |
|
294 | 294 | return 'markdown' |
|
295 | 295 | if filename.endswith(tuple(_filter([x[0] for x in cls.RST_EXTS if x[0]]))): |
|
296 | 296 | return 'rst' |
|
297 | 297 | |
|
298 | 298 | return None |
|
299 | 299 | |
|
300 | 300 | def render(self, source, filename=None): |
|
301 | 301 | """ |
|
302 | 302 | Renders a given filename using detected renderer |
|
303 | 303 | it detects renderers based on file extension or mimetype. |
|
304 | 304 | At last it will just do a simple html replacing new lines with <br/> |
|
305 | 305 | |
|
306 | 306 | :param filename: |
|
307 | 307 | :param source: |
|
308 | 308 | """ |
|
309 | 309 | |
|
310 | 310 | renderer = self._detect_renderer(source, filename) |
|
311 | 311 | readme_data = renderer(source) |
|
312 | 312 | return readme_data |
|
313 | 313 | |
|
314 | 314 | @classmethod |
|
315 | 315 | def _flavored_markdown(cls, text): |
|
316 | 316 | """ |
|
317 | 317 | Github style flavored markdown |
|
318 | 318 | |
|
319 | 319 | :param text: |
|
320 | 320 | """ |
|
321 | 321 | |
|
322 | 322 | # Extract pre blocks. |
|
323 | 323 | extractions = {} |
|
324 | 324 | |
|
325 | 325 | def pre_extraction_callback(matchobj): |
|
326 | 326 | digest = md5_safe(matchobj.group(0)) |
|
327 | 327 | extractions[digest] = matchobj.group(0) |
|
328 | 328 | return "{gfm-extraction-%s}" % digest |
|
329 | 329 | pattern = re.compile(r'<pre>.*?</pre>', re.MULTILINE | re.DOTALL) |
|
330 | 330 | text = re.sub(pattern, pre_extraction_callback, text) |
|
331 | 331 | |
|
332 | 332 | # Prevent foo_bar_baz from ending up with an italic word in the middle. |
|
333 | 333 | def italic_callback(matchobj): |
|
334 | 334 | s = matchobj.group(0) |
|
335 | 335 | if list(s).count('_') >= 2: |
|
336 | 336 | return s.replace('_', r'\_') |
|
337 | 337 | return s |
|
338 | 338 | text = re.sub(r'^(?! {4}|\t)\w+_\w+_\w[\w_]*', italic_callback, text) |
|
339 | 339 | |
|
340 | 340 | # Insert pre block extractions. |
|
341 | 341 | def pre_insert_callback(matchobj): |
|
342 | 342 | return '\n\n' + extractions[matchobj.group(1)] |
|
343 | 343 | text = re.sub(r'\{gfm-extraction-([0-9a-f]{32})\}', |
|
344 | 344 | pre_insert_callback, text) |
|
345 | 345 | |
|
346 | 346 | return text |
|
347 | 347 | |
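
Illustrative sketch (not part of the changeset): the underscore-escaping step from _flavored_markdown above, run on a made-up snippet to show which words are protected from italics.

import re

def italic_callback(matchobj):
    s = matchobj.group(0)
    if list(s).count('_') >= 2:
        return s.replace('_', r'\_')
    return s

text = 'foo_bar_baz should stay plain, foo_bar may not'
print(re.sub(r'^(?! {4}|\t)\w+_\w+_\w[\w_]*', italic_callback, text))
# -> foo\_bar\_baz should stay plain, foo_bar may not
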
|
348 | 348 | @classmethod |
|
349 | 349 | def urlify_text(cls, text): |
|
350 | 350 | def url_func(match_obj): |
|
351 | 351 | url_full = match_obj.groups()[0] |
|
352 | 352 | return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full}) |
|
353 | 353 | |
|
354 | 354 | return cls.URL_PAT.sub(url_func, text) |
|
355 | 355 | |
|
356 | 356 | @classmethod |
|
357 | 357 | def convert_mentions(cls, text, mode): |
|
358 | 358 | mention_pat = cls.MENTION_PAT |
|
359 | 359 | |
|
360 | 360 | def wrapp(match_obj): |
|
361 | 361 | uname = match_obj.groups()[0] |
|
362 | 362 | hovercard_url = "pyroutes.url('hovercard_username', {'username': '%s'});" % uname |
|
363 | 363 | |
|
364 | 364 | if mode == 'markdown': |
|
365 | 365 | tmpl = '<strong class="tooltip-hovercard" data-hovercard-alt="{uname}" data-hovercard-url="{hovercard_url}">@{uname}</strong>' |
|
366 | 366 | elif mode == 'rst': |
|
367 | 367 | tmpl = ' **@{uname}** ' |
|
368 | 368 | else: |
|
369 | 369 | raise ValueError('mode must be rst or markdown') |
|
370 | 370 | |
|
371 | 371 | return tmpl.format(**{'uname': uname, |
|
372 | 372 | 'hovercard_url': hovercard_url}) |
|
373 | 373 | |
|
374 | 374 | return mention_pat.sub(wrapp, text).strip() |
|
375 | 375 | |
|
376 | 376 | @classmethod |
|
377 | 377 | def plain(cls, source, universal_newline=True, leading_newline=True): |
|
378 | 378 | source = safe_unicode(source) |
|
379 | 379 | if universal_newline: |
|
380 | 380 | newline = '\n' |
|
381 | 381 | source = newline.join(source.splitlines()) |
|
382 | 382 | |
|
383 | 383 | rendered_source = cls.urlify_text(source) |
|
384 | 384 | source = '' |
|
385 | 385 | if leading_newline: |
|
386 | 386 | source += '<br />' |
|
387 | 387 | source += rendered_source.replace("\n", '<br />') |
|
388 | 388 | |
|
389 | 389 | rendered = cls.bleach_clean(source) |
|
390 | 390 | return rendered |
|
391 | 391 | |
|
392 | 392 | @classmethod |
|
393 | 393 | def markdown(cls, source, safe=True, flavored=True, mentions=False, |
|
394 | 394 | clean_html=True): |
|
395 | 395 | """ |
|
396 | 396 | returns markdown rendered code cleaned by the bleach library |
|
397 | 397 | """ |
|
398 | 398 | |
|
399 | 399 | if flavored: |
|
400 | 400 | markdown_renderer = get_markdown_renderer_flavored( |
|
401 | 401 | cls.extensions, cls.output_format) |
|
402 | 402 | else: |
|
403 | 403 | markdown_renderer = get_markdown_renderer( |
|
404 | 404 | cls.extensions, cls.output_format) |
|
405 | 405 | |
|
406 | 406 | if mentions: |
|
407 | 407 | mention_hl = cls.convert_mentions(source, mode='markdown') |
|
408 | 408 | # we extracted mentions; render again with mentions=False |
|
409 | 409 | return cls.markdown(mention_hl, safe=safe, flavored=flavored, |
|
410 | 410 | mentions=False) |
|
411 | 411 | |
|
412 | 412 | source = safe_unicode(source) |
|
413 | 413 | |
|
414 | 414 | try: |
|
415 | 415 | if flavored: |
|
416 | 416 | source = cls._flavored_markdown(source) |
|
417 | 417 | rendered = markdown_renderer.convert(source) |
|
418 | 418 | except Exception: |
|
419 | 419 | log.exception('Error when rendering Markdown') |
|
420 | 420 | if safe: |
|
421 | 421 | log.debug('Fallback to render in plain mode') |
|
422 | 422 | rendered = cls.plain(source) |
|
423 | 423 | else: |
|
424 | 424 | raise |
|
425 | 425 | |
|
426 | 426 | if clean_html: |
|
427 | 427 | rendered = cls.bleach_clean(rendered) |
|
428 | 428 | return rendered |
|
429 | 429 | |
|
430 | 430 | @classmethod |
|
431 | 431 | def rst(cls, source, safe=True, mentions=False, clean_html=False): |
|
432 | 432 | if mentions: |
|
433 | 433 | mention_hl = cls.convert_mentions(source, mode='rst') |
|
434 | 434 | # we extracted mentions; render again with mentions=False |
|
435 | 435 | return cls.rst(mention_hl, safe=safe, mentions=False) |
|
436 | 436 | |
|
437 | 437 | source = safe_unicode(source) |
|
438 | 438 | try: |
|
439 | 439 | docutils_settings = dict( |
|
440 | 440 | [(alias, None) for alias in |
|
441 | 441 | cls.RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES]) |
|
442 | 442 | |
|
443 | 443 | docutils_settings.update({ |
|
444 | 444 | 'input_encoding': 'unicode', |
|
445 | 445 | 'report_level': 4, |
|
446 | 446 | 'syntax_highlight': 'short', |
|
447 | 447 | }) |
|
448 | 448 | |
|
449 | 449 | for k, v in docutils_settings.items(): |
|
450 | 450 | directives.register_directive(k, v) |
|
451 | 451 | |
|
452 | 452 | parts = publish_parts(source=source, |
|
453 | 453 | writer=RhodeCodeWriter(), |
|
454 | 454 | settings_overrides=docutils_settings) |
|
455 | 455 | rendered = parts["fragment"] |
|
456 | 456 | if clean_html: |
|
457 | 457 | rendered = cls.bleach_clean(rendered) |
|
458 | 458 | return parts['html_title'] + rendered |
|
459 | 459 | except Exception: |
|
460 | 460 | log.exception('Error when rendering RST') |
|
461 | 461 | if safe: |
|
462 | 462 | log.debug('Fallback to render in plain mode') |
|
463 | 463 | return cls.plain(source) |
|
464 | 464 | else: |
|
465 | 465 | raise |
|
466 | 466 | |
|
467 | 467 | @classmethod |
|
468 | 468 | def jupyter(cls, source, safe=True): |
|
469 | 469 | from rhodecode.lib import helpers |
|
470 | 470 | |
|
471 | 471 | from traitlets.config import Config |
|
472 | 472 | import nbformat |
|
473 | 473 | from nbconvert import HTMLExporter |
|
474 | 474 | from nbconvert.preprocessors import Preprocessor |
|
475 | 475 | |
|
476 | 476 | class CustomHTMLExporter(HTMLExporter): |
|
477 | 477 | def _template_file_default(self): |
|
478 | 478 | return 'basic' |
|
479 | 479 | |
|
480 | 480 | class Sandbox(Preprocessor): |
|
481 | 481 | |
|
482 | 482 | def preprocess(self, nb, resources): |
|
483 | 483 | sandbox_text = 'SandBoxed(IPython.core.display.Javascript object)' |
|
484 | 484 | for cell in nb['cells']: |
|
485 | 485 | if not safe: |
|
486 | 486 | continue |
|
487 | 487 | |
|
488 | 488 | if 'outputs' in cell: |
|
489 | 489 | for cell_output in cell['outputs']: |
|
490 | 490 | if 'data' in cell_output: |
|
491 | 491 | if 'application/javascript' in cell_output['data']: |
|
492 | 492 | cell_output['data']['text/plain'] = sandbox_text |
|
493 | 493 | cell_output['data'].pop('application/javascript', None) |
|
494 | 494 | |
|
495 | 495 | if 'source' in cell and cell['cell_type'] == 'markdown': |
|
496 | 496 | # sanitize similar like in markdown |
|
497 | 497 | cell['source'] = cls.bleach_clean(cell['source']) |
|
498 | 498 | |
|
499 | 499 | return nb, resources |
|
500 | 500 | |
|
501 | 501 | def _sanitize_resources(input_resources): |
|
502 | 502 | """ |
|
503 | 503 | Skip/sanitize some of the CSS generated and included in jupyter |
|
504 | 504 | so it doesn't mess up the UI too much |
|
505 | 505 | """ |
|
506 | 506 | |
|
507 | 507 | # TODO(marcink): probably we should replace this with whole custom |
|
508 | 508 | # CSS set that doesn't screw up, but jupyter generated html has some |
|
509 | 509 | # special markers, so it requires Custom HTML exporter template with |
|
510 | 510 | # _default_template_path_default, to achieve that |
|
511 | 511 | |
|
512 | 512 | # strip the reset CSS |
|
513 | 513 | input_resources[0] = input_resources[0][input_resources[0].find('/*! Source'):] |
|
514 | 514 | return input_resources |
|
515 | 515 | |
|
516 | 516 | def as_html(notebook): |
|
517 | 517 | conf = Config() |
|
518 | 518 | conf.CustomHTMLExporter.preprocessors = [Sandbox] |
|
519 | 519 | html_exporter = CustomHTMLExporter(config=conf) |
|
520 | 520 | |
|
521 | 521 | (body, resources) = html_exporter.from_notebook_node(notebook) |
|
522 | 522 | header = '<!-- ## IPYTHON NOTEBOOK RENDERING ## -->' |
|
523 | 523 | js = MakoTemplate(r''' |
|
524 | 524 | <!-- MathJax configuration --> |
|
525 | 525 | <script type="text/x-mathjax-config"> |
|
526 | 526 | MathJax.Hub.Config({ |
|
527 | 527 | jax: ["input/TeX","output/HTML-CSS", "output/PreviewHTML"], |
|
528 | 528 | extensions: ["tex2jax.js","MathMenu.js","MathZoom.js", "fast-preview.js", "AssistiveMML.js", "[Contrib]/a11y/accessibility-menu.js"], |
|
529 | 529 | TeX: { |
|
530 | 530 | extensions: ["AMSmath.js","AMSsymbols.js","noErrors.js","noUndefined.js"] |
|
531 | 531 | }, |
|
532 | 532 | tex2jax: { |
|
533 | 533 | inlineMath: [ ['$','$'], ["\\(","\\)"] ], |
|
534 | 534 | displayMath: [ ['$$','$$'], ["\\[","\\]"] ], |
|
535 | 535 | processEscapes: true, |
|
536 | 536 | processEnvironments: true |
|
537 | 537 | }, |
|
538 | 538 | // Center justify equations in code and markdown cells. Elsewhere |
|
539 | 539 | // we use CSS to left justify single line equations in code cells. |
|
540 | 540 | displayAlign: 'center', |
|
541 | 541 | "HTML-CSS": { |
|
542 | 542 | styles: {'.MathJax_Display': {"margin": 0}}, |
|
543 | 543 | linebreaks: { automatic: true }, |
|
544 | 544 | availableFonts: ["STIX", "TeX"] |
|
545 | 545 | }, |
|
546 | 546 | showMathMenu: false |
|
547 | 547 | }); |
|
548 | 548 | </script> |
|
549 | 549 | <!-- End of MathJax configuration --> |
|
550 | 550 | <script src="${h.asset('js/src/math_jax/MathJax.js')}"></script> |
|
551 | 551 | ''').render(h=helpers) |
|
552 | 552 | |
|
553 | 553 | css = MakoTemplate(r''' |
|
554 | 554 | <link rel="stylesheet" type="text/css" href="${h.asset('css/style-ipython.css', ver=ver)}" media="screen"/> |
|
555 | 555 | ''').render(h=helpers, ver='ver1') |
|
556 | 556 | |
|
557 | 557 | body = '\n'.join([header, css, js, body]) |
|
558 | 558 | return body, resources |
|
559 | 559 | |
|
560 | 560 | notebook = nbformat.reads(source, as_version=4) |
|
561 | 561 | (body, resources) = as_html(notebook) |
|
562 | 562 | return body |
|
563 | 563 | |
|
564 | 564 | |
|
565 | 565 | class RstTemplateRenderer(object): |
|
566 | 566 | |
|
567 | 567 | def __init__(self): |
|
568 | 568 | base = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) |
|
569 | 569 | rst_template_dirs = [os.path.join(base, 'templates', 'rst_templates')] |
|
570 | 570 | self.template_store = TemplateLookup( |
|
571 | 571 | directories=rst_template_dirs, |
|
572 | 572 | input_encoding='utf-8', |
|
573 | 573 | imports=['from rhodecode.lib import helpers as h']) |
|
574 | 574 | |
|
575 | 575 | def _get_template(self, templatename): |
|
576 | 576 | return self.template_store.get_template(templatename) |
|
577 | 577 | |
|
578 | 578 | def render(self, template_name, **kwargs): |
|
579 | 579 | template = self._get_template(template_name) |
|
580 | 580 | return template.render(**kwargs) |
@@ -1,156 +1,156 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | SimpleGit middleware for handling git protocol request (push/clone etc.) |
|
23 | 23 | It's implemented with basic auth function |
|
24 | 24 | """ |
|
25 | 25 | import os |
|
26 | 26 | import re |
|
27 | 27 | import logging |
|
28 | import urlparse | |
|
28 | import urllib.parse | |
|
29 | 29 | |
|
30 | 30 | import rhodecode |
|
31 | 31 | from rhodecode.lib import utils |
|
32 | 32 | from rhodecode.lib import utils2 |
|
33 | 33 | from rhodecode.lib.middleware import simplevcs |
|
34 | 34 | |
|
35 | 35 | log = logging.getLogger(__name__) |
|
36 | 36 | |
|
37 | 37 | |
|
38 | 38 | GIT_PROTO_PAT = re.compile( |
|
39 | 39 | r'^/(.+)/(info/refs|info/lfs/(.+)|git-upload-pack|git-receive-pack)') |
|
40 | 40 | GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))') |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | def default_lfs_store(): |
|
44 | 44 | """ |
|
45 | 45 | Default lfs store location, it's consistent with Mercurials large file |
|
46 | 46 | store which is in .cache/largefiles |
|
47 | 47 | """ |
|
48 | 48 | from rhodecode.lib.vcs.backends.git import lfs_store |
|
49 | 49 | user_home = os.path.expanduser("~") |
|
50 | 50 | return lfs_store(user_home) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class SimpleGit(simplevcs.SimpleVCS): |
|
54 | 54 | |
|
55 | 55 | SCM = 'git' |
|
56 | 56 | |
|
57 | 57 | def _get_repository_name(self, environ): |
|
58 | 58 | """ |
|
59 | 59 | Gets repository name out of PATH_INFO header |
|
60 | 60 | |
|
61 | 61 | :param environ: environ where PATH_INFO is stored |
|
62 | 62 | """ |
|
63 | 63 | repo_name = GIT_PROTO_PAT.match(environ['PATH_INFO']).group(1) |
|
64 | 64 | # for GIT LFS and bare format, strip the .git suffix from names |
|
65 | 65 | if repo_name.endswith('.git'): |
|
66 | 66 | repo_name = repo_name[:-4] |
|
67 | 67 | return repo_name |
|
68 | 68 | |
|
69 | 69 | def _get_lfs_action(self, path, request_method): |
|
70 | 70 | """ |
|
71 | 71 | return an action based on LFS requests type. |
|
72 | 72 | Those routes are handled inside vcsserver app. |
|
73 | 73 | |
|
74 | 74 | batch -> POST to /info/lfs/objects/batch => PUSH/PULL |
|
75 | 75 | batch is based on the `operation`, |
|
76 | 76 | that could be download or upload, but those are only |
|
77 | 77 | instructions to fetch, so we always return pull |
|
78 | 78 | |
|
79 | 79 | download -> GET to /info/lfs/{oid} => PULL |
|
80 | 80 | upload -> PUT to /info/lfs/{oid} => PUSH |
|
81 | 81 | |
|
82 | 82 | verification -> POST to /info/lfs/verify => PULL |
|
83 | 83 | |
|
84 | 84 | """ |
|
85 | 85 | |
|
86 | 86 | match_obj = GIT_LFS_PROTO_PAT.match(path) |
|
87 | 87 | _parts = match_obj.groups() |
|
88 | 88 | repo_name, path, operation = _parts |
|
89 | 89 | log.debug( |
|
90 | 90 | 'LFS: detecting operation based on following ' |
|
91 | 91 | 'data: %s, req_method:%s', _parts, request_method) |
|
92 | 92 | |
|
93 | 93 | if operation == 'verify': |
|
94 | 94 | return 'pull' |
|
95 | 95 | elif operation == 'objects/batch': |
|
96 | 96 | # batch sends back instructions for the API to dl/upl, we report it |
|
97 | 97 | # as pull |
|
98 | 98 | if request_method == 'POST': |
|
99 | 99 | return 'pull' |
|
100 | 100 | |
|
101 | 101 | elif operation: |
|
102 | 102 | # probably an OID; upload is a PUT, download a GET |
|
103 | 103 | if request_method == 'GET': |
|
104 | 104 | return 'pull' |
|
105 | 105 | else: |
|
106 | 106 | return 'push' |
|
107 | 107 | |
|
108 | 108 | # if default not found require push, as action |
|
109 | 109 | return 'push' |
|
110 | 110 | |
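
Illustrative sketch (not part of the changeset): the decision table documented in _get_lfs_action above, condensed into a standalone helper; lfs_action and the sample paths are hypothetical.

import re

GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')

def lfs_action(path, request_method):
    repo_name, _lfs_path, operation = GIT_LFS_PROTO_PAT.match(path).groups()
    if operation == 'verify':
        return 'pull'
    if operation == 'objects/batch':
        # batch only returns transfer instructions, report POST as pull
        return 'pull' if request_method == 'POST' else 'push'
    # anything else is treated as an OID: GET downloads, PUT uploads
    return 'pull' if request_method == 'GET' else 'push'

print(lfs_action('/my-repo/info/lfs/objects/batch', 'POST'))  # pull
print(lfs_action('/my-repo/info/lfs/0a1b2c3d', 'PUT'))        # push
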
|
111 | 111 | _ACTION_MAPPING = { |
|
112 | 112 | 'git-receive-pack': 'push', |
|
113 | 113 | 'git-upload-pack': 'pull', |
|
114 | 114 | } |
|
115 | 115 | |
|
116 | 116 | def _get_action(self, environ): |
|
117 | 117 | """ |
|
118 | 118 | Maps git request commands into a pull or push command. |
|
119 | 119 | In case of unknown/unexpected data, it returns 'pull' to be safe. |
|
120 | 120 | |
|
121 | 121 | :param environ: |
|
122 | 122 | """ |
|
123 | 123 | path = environ['PATH_INFO'] |
|
124 | 124 | |
|
125 | 125 | if path.endswith('/info/refs'): |
|
126 | query = urlparse.parse_qs(environ['QUERY_STRING']) | |
|
126 | query = urllib.parse.parse_qs(environ['QUERY_STRING']) |
|
127 | 127 | service_cmd = query.get('service', [''])[0] |
|
128 | 128 | return self._ACTION_MAPPING.get(service_cmd, 'pull') |
|
129 | 129 | |
|
130 | 130 | elif GIT_LFS_PROTO_PAT.match(environ['PATH_INFO']): |
|
131 | 131 | return self._get_lfs_action( |
|
132 | 132 | environ['PATH_INFO'], environ['REQUEST_METHOD']) |
|
133 | 133 | |
|
134 | 134 | elif path.endswith('/git-receive-pack'): |
|
135 | 135 | return 'push' |
|
136 | 136 | elif path.endswith('/git-upload-pack'): |
|
137 | 137 | return 'pull' |
|
138 | 138 | |
|
139 | 139 | return 'pull' |
|
140 | 140 | |
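
Illustrative sketch (not part of the changeset): how the ?service= query on /info/refs resolves to pull or push once parse_qs is used; the query strings are hypothetical.

import urllib.parse

ACTION_MAPPING = {'git-receive-pack': 'push', 'git-upload-pack': 'pull'}

def action_for_info_refs(query_string):
    query = urllib.parse.parse_qs(query_string)
    service_cmd = query.get('service', [''])[0]
    return ACTION_MAPPING.get(service_cmd, 'pull')

print(action_for_info_refs('service=git-upload-pack'))   # pull (clone/fetch)
print(action_for_info_refs('service=git-receive-pack'))  # push
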
|
141 | 141 | def _create_wsgi_app(self, repo_path, repo_name, config): |
|
142 | 142 | return self.scm_app.create_git_wsgi_app( |
|
143 | 143 | repo_path, repo_name, config) |
|
144 | 144 | |
|
145 | 145 | def _create_config(self, extras, repo_name, scheme='http'): |
|
146 | 146 | extras['git_update_server_info'] = utils2.str2bool( |
|
147 | 147 | rhodecode.CONFIG.get('git_update_server_info')) |
|
148 | 148 | |
|
149 | 149 | config = utils.make_db_config(repo=repo_name) |
|
150 | 150 | custom_store = config.get('vcs_git_lfs', 'store_location') |
|
151 | 151 | |
|
152 | 152 | extras['git_lfs_enabled'] = utils2.str2bool( |
|
153 | 153 | config.get('vcs_git_lfs', 'enabled')) |
|
154 | 154 | extras['git_lfs_store_path'] = custom_store or default_lfs_store() |
|
155 | 155 | extras['git_lfs_http_scheme'] = scheme |
|
156 | 156 | return extras |
@@ -1,160 +1,160 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | SimpleHG middleware for handling mercurial protocol request |
|
23 | 23 | (push/clone etc.). It's implemented with basic auth function |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import logging |
|
27 | import urlparse | |
|
27 | import urllib.parse | |
|
28 | 28 | import urllib.request, urllib.parse, urllib.error |
|
29 | 29 | |
|
30 | 30 | from rhodecode.lib import utils |
|
31 | 31 | from rhodecode.lib.ext_json import json |
|
32 | 32 | from rhodecode.lib.middleware import simplevcs |
|
33 | 33 | |
|
34 | 34 | log = logging.getLogger(__name__) |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | class SimpleHg(simplevcs.SimpleVCS): |
|
38 | 38 | |
|
39 | 39 | SCM = 'hg' |
|
40 | 40 | |
|
41 | 41 | def _get_repository_name(self, environ): |
|
42 | 42 | """ |
|
43 | 43 | Gets repository name out of PATH_INFO header |
|
44 | 44 | |
|
45 | 45 | :param environ: environ where PATH_INFO is stored |
|
46 | 46 | """ |
|
47 | 47 | repo_name = environ['PATH_INFO'] |
|
48 | 48 | if repo_name and repo_name.startswith('/'): |
|
49 | 49 | # remove only the first leading / |
|
50 | 50 | repo_name = repo_name[1:] |
|
51 | 51 | return repo_name.rstrip('/') |
|
52 | 52 | |
|
53 | 53 | _ACTION_MAPPING = { |
|
54 | 54 | 'changegroup': 'pull', |
|
55 | 55 | 'changegroupsubset': 'pull', |
|
56 | 56 | 'getbundle': 'pull', |
|
57 | 57 | 'stream_out': 'pull', |
|
58 | 58 | 'listkeys': 'pull', |
|
59 | 59 | 'between': 'pull', |
|
60 | 60 | 'branchmap': 'pull', |
|
61 | 61 | 'branches': 'pull', |
|
62 | 62 | 'clonebundles': 'pull', |
|
63 | 63 | 'capabilities': 'pull', |
|
64 | 64 | 'debugwireargs': 'pull', |
|
65 | 65 | 'heads': 'pull', |
|
66 | 66 | 'lookup': 'pull', |
|
67 | 67 | 'hello': 'pull', |
|
68 | 68 | 'known': 'pull', |
|
69 | 69 | |
|
70 | 70 | # largefiles |
|
71 | 71 | 'putlfile': 'push', |
|
72 | 72 | 'getlfile': 'pull', |
|
73 | 73 | 'statlfile': 'pull', |
|
74 | 74 | 'lheads': 'pull', |
|
75 | 75 | |
|
76 | 76 | # evolve |
|
77 | 77 | 'evoext_obshashrange_v1': 'pull', |
|
78 | 78 | 'evoext_obshash': 'pull', |
|
79 | 79 | 'evoext_obshash1': 'pull', |
|
80 | 80 | |
|
81 | 81 | 'unbundle': 'push', |
|
82 | 82 | 'pushkey': 'push', |
|
83 | 83 | } |
|
84 | 84 | |
|
85 | 85 | @classmethod |
|
86 | 86 | def _get_xarg_headers(cls, environ): |
|
87 | 87 | i = 1 |
|
88 | 88 | chunks = [] # gather chunks stored in multiple 'hgarg_N' |
|
89 | 89 | while True: |
|
90 | 90 | head = environ.get('HTTP_X_HGARG_{}'.format(i)) |
|
91 | 91 | if not head: |
|
92 | 92 | break |
|
93 | 93 | i += 1 |
|
94 | 94 | chunks.append(urllib.parse.unquote_plus(head)) |
|
95 | 95 | full_arg = ''.join(chunks) |
|
96 | 96 | pref = 'cmds=' |
|
97 | 97 | if full_arg.startswith(pref): |
|
98 | 98 | # strip the cmds= header defining our batch commands |
|
99 | 99 | full_arg = full_arg[len(pref):] |
|
100 | 100 | cmds = full_arg.split(';') |
|
101 | 101 | return cmds |
|
102 | 102 | |
|
103 | 103 | @classmethod |
|
104 | 104 | def _get_batch_cmd(cls, environ): |
|
105 | 105 | """ |
|
106 | 106 | Handle batch command sub-commands. Those are ';'-separated commands |
|
107 | 107 | sent by the batch command that the server needs to execute. We need to extract |
|
108 | 108 | those, and map them to our ACTION_MAPPING to get all push/pull commands |
|
109 | 109 | specified in the batch |
|
110 | 110 | """ |
|
111 | 111 | default = 'push' |
|
112 | 112 | batch_cmds = [] |
|
113 | 113 | try: |
|
114 | 114 | cmds = cls._get_xarg_headers(environ) |
|
115 | 115 | for pair in cmds: |
|
116 | 116 | parts = pair.split(' ', 1) |
|
117 | 117 | if len(parts) != 2: |
|
118 | 118 | continue |
|
119 | 119 | # entry should be in a format `key ARGS` |
|
120 | 120 | cmd, args = parts |
|
121 | 121 | action = cls._ACTION_MAPPING.get(cmd, default) |
|
122 | 122 | batch_cmds.append(action) |
|
123 | 123 | except Exception: |
|
124 | 124 | log.exception('Failed to extract batch commands operations') |
|
125 | 125 | |
|
126 | 126 | # in case we failed, (e.g malformed data) assume it's PUSH sub-command |
|
127 | 127 | # for safety |
|
128 | 128 | return batch_cmds or [default] |
|
129 | 129 | |
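
Illustrative sketch (not part of the changeset): reassembling a chunked X-HgArg header and mapping a batch of sub-commands, as the two methods above do; the environ contents and the reduced action table are made up.

import urllib.parse

ACTION_MAPPING = {'heads': 'pull', 'known': 'pull', 'unbundle': 'push'}

environ = {
    # hg splits long arguments over numbered headers; here one chunk is enough
    'HTTP_X_HGARG_1': urllib.parse.quote_plus('cmds=heads ;known nodes=abc'),
}

chunks, i = [], 1
while environ.get('HTTP_X_HGARG_{}'.format(i)):
    chunks.append(urllib.parse.unquote_plus(environ['HTTP_X_HGARG_{}'.format(i)]))
    i += 1
full_arg = ''.join(chunks)
if full_arg.startswith('cmds='):
    full_arg = full_arg[len('cmds='):]

batch_actions = []
for pair in full_arg.split(';'):
    parts = pair.split(' ', 1)
    if len(parts) == 2:
        cmd, _args = parts
        batch_actions.append(ACTION_MAPPING.get(cmd, 'push'))
print(batch_actions)  # ['pull', 'pull'] -> the whole batch is handled as pull
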
|
130 | 130 | def _get_action(self, environ): |
|
131 | 131 | """ |
|
132 | 132 | Maps mercurial request commands into a pull or push command. |
|
133 | 133 | In case of unknown/unexpected data, it returns 'push' to be safe. |
|
134 | 134 | |
|
135 | 135 | :param environ: |
|
136 | 136 | """ |
|
137 | 137 | default = 'push' |
|
138 | query = urlparse.parse_qs(environ['QUERY_STRING'], | |
|
138 | query = urllib.parse.parse_qs(environ['QUERY_STRING'], |
|
139 | 139 | keep_blank_values=True) |
|
140 | 140 | |
|
141 | 141 | if 'cmd' in query: |
|
142 | 142 | cmd = query['cmd'][0] |
|
143 | 143 | if cmd == 'batch': |
|
144 | 144 | cmds = self._get_batch_cmd(environ) |
|
145 | 145 | if 'push' in cmds: |
|
146 | 146 | return 'push' |
|
147 | 147 | else: |
|
148 | 148 | return 'pull' |
|
149 | 149 | return self._ACTION_MAPPING.get(cmd, default) |
|
150 | 150 | |
|
151 | 151 | return default |
|
152 | 152 | |
|
153 | 153 | def _create_wsgi_app(self, repo_path, repo_name, config): |
|
154 | 154 | return self.scm_app.create_hg_wsgi_app(repo_path, repo_name, config) |
|
155 | 155 | |
|
156 | 156 | def _create_config(self, extras, repo_name, scheme='http'): |
|
157 | 157 | config = utils.make_db_config(repo=repo_name) |
|
158 | 158 | config.set('rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
159 | 159 | |
|
160 | 160 | return config.serialize() |
@@ -1,229 +1,229 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import base64 |
|
22 | 22 | import logging |
|
23 | 23 | import urllib.request, urllib.parse, urllib.error |
|
24 | import urlparse | |
|
24 | import urllib.parse | |
|
25 | 25 | |
|
26 | 26 | import requests |
|
27 | 27 | from pyramid.httpexceptions import HTTPNotAcceptable |
|
28 | 28 | |
|
29 | 29 | from rhodecode.lib import rc_cache |
|
30 | 30 | from rhodecode.lib.middleware import simplevcs |
|
31 | 31 | from rhodecode.lib.utils import is_valid_repo |
|
32 | 32 | from rhodecode.lib.utils2 import str2bool, safe_int, safe_str |
|
33 | 33 | from rhodecode.lib.ext_json import json |
|
34 | 34 | from rhodecode.lib.hooks_daemon import store_txn_id_data |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | log = logging.getLogger(__name__) |
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | class SimpleSvnApp(object): |
|
41 | 41 | IGNORED_HEADERS = [ |
|
42 | 42 | 'connection', 'keep-alive', 'content-encoding', |
|
43 | 43 | 'transfer-encoding', 'content-length'] |
|
44 | 44 | rc_extras = {} |
|
45 | 45 | |
|
46 | 46 | def __init__(self, config): |
|
47 | 47 | self.config = config |
|
48 | 48 | |
|
49 | 49 | def __call__(self, environ, start_response): |
|
50 | 50 | request_headers = self._get_request_headers(environ) |
|
51 | 51 | data = environ['wsgi.input'] |
|
52 | 52 | req_method = environ['REQUEST_METHOD'] |
|
53 | 53 | has_content_length = 'CONTENT_LENGTH' in environ |
|
54 | 54 | path_info = self._get_url( |
|
55 | 55 | self.config.get('subversion_http_server_url', ''), environ['PATH_INFO']) |
|
56 | 56 | transfer_encoding = environ.get('HTTP_TRANSFER_ENCODING', '') |
|
57 | 57 | log.debug('Handling: %s method via `%s`', req_method, path_info) |
|
58 | 58 | |
|
59 | 59 | # stream control flag, based on request and content type... |
|
60 | 60 | stream = False |
|
61 | 61 | |
|
62 | 62 | if req_method in ['MKCOL'] or has_content_length: |
|
63 | 63 | data_processed = False |
|
64 | 64 | # read chunk to check if we have txn-with-props |
|
65 | 65 | initial_data = data.read(1024) |
|
66 | 66 | if initial_data.startswith('(create-txn-with-props'): |
|
67 | 67 | data = initial_data + data.read() |
|
68 | 68 | # store on-the-fly our rc_extra using svn revision properties |
|
69 | 69 | # those can be read later on in hooks executed so we have a way |
|
70 | 70 | # to pass the data into svn hooks |
|
71 | 71 | rc_data = base64.urlsafe_b64encode(json.dumps(self.rc_extras)) |
|
72 | 72 | rc_data_len = len(rc_data) |
|
73 | 73 | # header defines data length, and serialized data |
|
74 | 74 | skel = ' rc-scm-extras {} {}'.format(rc_data_len, rc_data) |
|
75 | 75 | data = data[:-2] + skel + '))' |
|
76 | 76 | data_processed = True |
|
77 | 77 | |
|
78 | 78 | if not data_processed: |
|
79 | 79 | # NOTE(johbo): Avoid that we end up with sending the request in chunked |
|
80 | 80 | # transfer encoding (mainly on Gunicorn). If we know the content |
|
81 | 81 | # length, then we should transfer the payload in one request. |
|
82 | 82 | data = initial_data + data.read() |
|
83 | 83 | |
|
84 | 84 | if req_method in ['GET', 'PUT'] or transfer_encoding == 'chunked': |
|
85 | 85 | # NOTE(marcink): when getting/uploading files we want to STREAM content |
|
86 | 86 | # back to the client/proxy instead of buffering it here... |
|
87 | 87 | stream = True |
|
88 | 88 | |
|
89 | 89 | stream = stream |
|
90 | 90 | log.debug('Calling SVN PROXY at `%s`, using method:%s. Stream: %s', |
|
91 | 91 | path_info, req_method, stream) |
|
92 | 92 | try: |
|
93 | 93 | response = requests.request( |
|
94 | 94 | req_method, path_info, |
|
95 | 95 | data=data, headers=request_headers, stream=stream) |
|
96 | 96 | except requests.ConnectionError: |
|
97 | 97 | log.exception('ConnectionError occurred for endpoint %s', path_info) |
|
98 | 98 | raise |
|
99 | 99 | |
|
100 | 100 | if response.status_code not in [200, 401]: |
|
101 | 101 | from rhodecode.lib.utils2 import safe_str |
|
102 | 102 | text = '\n{}'.format(safe_str(response.text)) if response.text else '' |
|
103 | 103 | if response.status_code >= 500: |
|
104 | 104 | log.error('Got SVN response:%s with text:`%s`', response, text) |
|
105 | 105 | else: |
|
106 | 106 | log.debug('Got SVN response:%s with text:`%s`', response, text) |
|
107 | 107 | else: |
|
108 | 108 | log.debug('got response code: %s', response.status_code) |
|
109 | 109 | |
|
110 | 110 | response_headers = self._get_response_headers(response.headers) |
|
111 | 111 | |
|
112 | 112 | if response.headers.get('SVN-Txn-name'): |
|
113 | 113 | svn_tx_id = response.headers.get('SVN-Txn-name') |
|
114 | 114 | txn_id = rc_cache.utils.compute_key_from_params( |
|
115 | 115 | self.config['repository'], svn_tx_id) |
|
116 | 116 | port = safe_int(self.rc_extras['hooks_uri'].split(':')[-1]) |
|
117 | 117 | store_txn_id_data(txn_id, {'port': port}) |
|
118 | 118 | |
|
119 | 119 | start_response( |
|
120 | 120 | '{} {}'.format(response.status_code, response.reason), |
|
121 | 121 | response_headers) |
|
122 | 122 | return response.iter_content(chunk_size=1024) |
|
123 | 123 | |
|
124 | 124 | def _get_url(self, svn_http_server, path): |
|
125 | 125 | svn_http_server_url = (svn_http_server or '').rstrip('/') |
|
126 | url_path = urlparse.urljoin(svn_http_server_url + '/', (path or '').lstrip('/')) | |
|
126 | url_path = urllib.parse.urljoin(svn_http_server_url + '/', (path or '').lstrip('/')) |
|
127 | 127 | url_path = urllib.parse.quote(url_path, safe="/:=~+!$,;'") |
|
128 | 128 | return url_path |
|
129 | 129 | |
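
Illustrative sketch (not part of the changeset): what the urljoin/quote combination above produces for a hypothetical proxy URL and request path.

import urllib.parse

svn_http_server_url = 'http://localhost:8090'.rstrip('/')
path = '/my-svn-repo/!svn/me'

url_path = urllib.parse.urljoin(svn_http_server_url + '/', path.lstrip('/'))
url_path = urllib.parse.quote(url_path, safe="/:=~+!$,;'")
print(url_path)  # http://localhost:8090/my-svn-repo/!svn/me
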
|
130 | 130 | def _get_request_headers(self, environ): |
|
131 | 131 | headers = {} |
|
132 | 132 | |
|
133 | 133 | for key in environ: |
|
134 | 134 | if not key.startswith('HTTP_'): |
|
135 | 135 | continue |
|
136 | 136 | new_key = key.split('_') |
|
137 | 137 | new_key = [k.capitalize() for k in new_key[1:]] |
|
138 | 138 | new_key = '-'.join(new_key) |
|
139 | 139 | headers[new_key] = environ[key] |
|
140 | 140 | |
|
141 | 141 | if 'CONTENT_TYPE' in environ: |
|
142 | 142 | headers['Content-Type'] = environ['CONTENT_TYPE'] |
|
143 | 143 | |
|
144 | 144 | if 'CONTENT_LENGTH' in environ: |
|
145 | 145 | headers['Content-Length'] = environ['CONTENT_LENGTH'] |
|
146 | 146 | |
|
147 | 147 | return headers |
|
148 | 148 | |
|
149 | 149 | def _get_response_headers(self, headers): |
|
150 | 150 | headers = [ |
|
151 | 151 | (h, headers[h]) |
|
152 | 152 | for h in headers |
|
153 | 153 | if h.lower() not in self.IGNORED_HEADERS |
|
154 | 154 | ] |
|
155 | 155 | |
|
156 | 156 | return headers |
|
157 | 157 | |
|
158 | 158 | |
|
159 | 159 | class DisabledSimpleSvnApp(object): |
|
160 | 160 | def __init__(self, config): |
|
161 | 161 | self.config = config |
|
162 | 162 | |
|
163 | 163 | def __call__(self, environ, start_response): |
|
164 | 164 | reason = 'Cannot handle SVN call because: SVN HTTP Proxy is not enabled' |
|
165 | 165 | log.warning(reason) |
|
166 | 166 | return HTTPNotAcceptable(reason)(environ, start_response) |
|
167 | 167 | |
|
168 | 168 | |
|
169 | 169 | class SimpleSvn(simplevcs.SimpleVCS): |
|
170 | 170 | |
|
171 | 171 | SCM = 'svn' |
|
172 | 172 | READ_ONLY_COMMANDS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT') |
|
173 | 173 | DEFAULT_HTTP_SERVER = 'http://localhost:8090' |
|
174 | 174 | |
|
175 | 175 | def _get_repository_name(self, environ): |
|
176 | 176 | """ |
|
177 | 177 | Gets repository name out of PATH_INFO header |
|
178 | 178 | |
|
179 | 179 | :param environ: environ where PATH_INFO is stored |
|
180 | 180 | """ |
|
181 | 181 | path = environ['PATH_INFO'].split('!') |
|
182 | 182 | repo_name = path[0].strip('/') |
|
183 | 183 | |
|
184 | 184 | # SVN includes the whole path in its requests, including |
|
185 | 185 | # subdirectories inside the repo. Therefore we have to search for |
|
186 | 186 | # the repo root directory. |
|
187 | 187 | if not is_valid_repo( |
|
188 | 188 | repo_name, self.base_path, explicit_scm=self.SCM): |
|
189 | 189 | current_path = '' |
|
190 | 190 | for component in repo_name.split('/'): |
|
191 | 191 | current_path += component |
|
192 | 192 | if is_valid_repo( |
|
193 | 193 | current_path, self.base_path, explicit_scm=self.SCM): |
|
194 | 194 | return current_path |
|
195 | 195 | current_path += '/' |
|
196 | 196 | |
|
197 | 197 | return repo_name |
|
198 | 198 | |
|
199 | 199 | def _get_action(self, environ): |
|
200 | 200 | return ( |
|
201 | 201 | 'pull' |
|
202 | 202 | if environ['REQUEST_METHOD'] in self.READ_ONLY_COMMANDS |
|
203 | 203 | else 'push') |
|
204 | 204 | |
|
205 | 205 | def _should_use_callback_daemon(self, extras, environ, action): |
|
206 | 206 | # only MERGE command triggers hooks, so we don't want to start |
|
207 | 207 | # hooks server too many times. POST however starts the svn transaction |
|
208 | 208 | # so we also need to run the init of the callback daemon on POST |
|
209 | 209 | if environ['REQUEST_METHOD'] in ['MERGE', 'POST']: |
|
210 | 210 | return True |
|
211 | 211 | return False |
|
212 | 212 | |
|
213 | 213 | def _create_wsgi_app(self, repo_path, repo_name, config): |
|
214 | 214 | if self._is_svn_enabled(): |
|
215 | 215 | return SimpleSvnApp(config) |
|
216 | 216 | # we don't have http proxy enabled return dummy request handler |
|
217 | 217 | return DisabledSimpleSvnApp(config) |
|
218 | 218 | |
|
219 | 219 | def _is_svn_enabled(self): |
|
220 | 220 | conf = self.repo_vcs_config |
|
221 | 221 | return str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled')) |
|
222 | 222 | |
|
223 | 223 | def _create_config(self, extras, repo_name, scheme='http'): |
|
224 | 224 | conf = self.repo_vcs_config |
|
225 | 225 | server_url = conf.get('vcs_svn_proxy', 'http_server_url') |
|
226 | 226 | server_url = server_url or self.DEFAULT_HTTP_SERVER |
|
227 | 227 | |
|
228 | 228 | extras['subversion_http_server_url'] = server_url |
|
229 | 229 | return extras |
@@ -1,189 +1,189 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Implementation of the scm_app interface using raw HTTP communication. |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import base64 |
|
26 | 26 | import logging |
|
27 | import urlparse | |
|
27 | import urllib.parse | |
|
28 | 28 | import wsgiref.util |
|
29 | 29 | |
|
30 | 30 | import msgpack |
|
31 | 31 | import requests |
|
32 | 32 | import webob.request |
|
33 | 33 | |
|
34 | 34 | import rhodecode |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | log = logging.getLogger(__name__) |
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | def create_git_wsgi_app(repo_path, repo_name, config): |
|
41 | 41 | url = _vcs_streaming_url() + 'git/' |
|
42 | 42 | return VcsHttpProxy(url, repo_path, repo_name, config) |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | def create_hg_wsgi_app(repo_path, repo_name, config): |
|
46 | 46 | url = _vcs_streaming_url() + 'hg/' |
|
47 | 47 | return VcsHttpProxy(url, repo_path, repo_name, config) |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | def _vcs_streaming_url(): |
|
51 | 51 | template = 'http://{}/stream/' |
|
52 | 52 | return template.format(rhodecode.CONFIG['vcs.server']) |
|
53 | 53 | |
|
54 | 54 | |
|
55 | 55 | # TODO: johbo: Avoid the global. |
|
56 | 56 | session = requests.Session() |
|
57 | 57 | # Requests speedup, avoid reading .netrc and similar |
|
58 | 58 | session.trust_env = False |
|
59 | 59 | |
|
60 | 60 | # prevent urllib3 spawning our logs. |
|
61 | 61 | logging.getLogger("requests.packages.urllib3.connectionpool").setLevel( |
|
62 | 62 | logging.WARNING) |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | class VcsHttpProxy(object): |
|
66 | 66 | """ |
|
67 | 67 | A WSGI application which proxies vcs requests. |
|
68 | 68 | |
|
69 | 69 | The goal is to shuffle the data around without touching it. The only |
|
70 | 70 | exception is the extra data from the config object which we send to the |
|
71 | 71 | server as well. |
|
72 | 72 | """ |
|
73 | 73 | |
|
74 | 74 | def __init__(self, url, repo_path, repo_name, config): |
|
75 | 75 | """ |
|
76 | 76 | :param str url: The URL of the VCSServer to call. |
|
77 | 77 | """ |
|
78 | 78 | self._url = url |
|
79 | 79 | self._repo_name = repo_name |
|
80 | 80 | self._repo_path = repo_path |
|
81 | 81 | self._config = config |
|
82 | 82 | self.rc_extras = {} |
|
83 | 83 | log.debug( |
|
84 | 84 | "Creating VcsHttpProxy for repo %s, url %s", |
|
85 | 85 | repo_name, url) |
|
86 | 86 | |
|
87 | 87 | def __call__(self, environ, start_response): |
|
88 | 88 | config = msgpack.packb(self._config) |
|
89 | 89 | request = webob.request.Request(environ) |
|
90 | 90 | request_headers = request.headers |
|
91 | 91 | |
|
92 | 92 | request_headers.update({ |
|
93 | 93 | # TODO: johbo: Remove this, rely on URL path only |
|
94 | 94 | 'X-RC-Repo-Name': self._repo_name, |
|
95 | 95 | 'X-RC-Repo-Path': self._repo_path, |
|
96 | 96 | 'X-RC-Path-Info': environ['PATH_INFO'], |
|
97 | 97 | |
|
98 | 98 | 'X-RC-Repo-Store': self.rc_extras.get('repo_store'), |
|
99 | 99 | 'X-RC-Server-Config-File': self.rc_extras.get('config'), |
|
100 | 100 | |
|
101 | 101 | 'X-RC-Auth-User': self.rc_extras.get('username'), |
|
102 | 102 | 'X-RC-Auth-User-Id': str(self.rc_extras.get('user_id')), |
|
103 | 103 | 'X-RC-Auth-User-Ip': self.rc_extras.get('ip'), |
|
104 | 104 | |
|
105 | 105 | # TODO: johbo: Avoid encoding and put this into payload? |
|
106 | 106 | 'X-RC-Repo-Config': base64.b64encode(config), |
|
107 | 107 | 'X-RC-Locked-Status-Code': rhodecode.CONFIG.get('lock_ret_code'), |
|
108 | 108 | }) |
|
109 | 109 | |
|
110 | 110 | method = environ['REQUEST_METHOD'] |
|
111 | 111 | |
|
112 | 112 | # Preserve the query string |
|
113 | 113 | url = self._url |
|
114 | url = urlparse.urljoin(url, self._repo_name) | |
|
114 | url = urllib.parse.urljoin(url, self._repo_name) |
|
115 | 115 | if environ.get('QUERY_STRING'): |
|
116 | 116 | url += '?' + environ['QUERY_STRING'] |
|
117 | 117 | |
|
118 | 118 | log.debug('http-app: preparing request to: %s', url) |
|
119 | 119 | response = session.request( |
|
120 | 120 | method, |
|
121 | 121 | url, |
|
122 | 122 | data=_maybe_stream_request(environ), |
|
123 | 123 | headers=request_headers, |
|
124 | 124 | stream=True) |
|
125 | 125 | |
|
126 | 126 | log.debug('http-app: got vcsserver response: %s', response) |
|
127 | 127 | if response.status_code >= 500: |
|
128 | 128 | log.error('Exception returned by vcsserver at: %s %s, %s', |
|
129 | 129 | url, response.status_code, response.content) |
|
130 | 130 | |
|
131 | 131 | # Preserve the headers of the response, except hop_by_hop ones |
|
132 | 132 | response_headers = [ |
|
133 | 133 | (h, v) for h, v in response.headers.items() |
|
134 | 134 | if not wsgiref.util.is_hop_by_hop(h) |
|
135 | 135 | ] |
|
136 | 136 | |
|
137 | 137 | # Build status argument for start_response callable. |
|
138 | 138 | status = '{status_code} {reason_phrase}'.format( |
|
139 | 139 | status_code=response.status_code, |
|
140 | 140 | reason_phrase=response.reason) |
|
141 | 141 | |
|
142 | 142 | start_response(status, response_headers) |
|
143 | 143 | return _maybe_stream_response(response) |
|
144 | 144 | |
|
145 | 145 | |
|
146 | 146 | def read_in_chunks(stream_obj, block_size=1024, chunks=-1): |
|
147 | 147 | """ |
|
148 | 148 | Read Stream in chunks, default chunk size: 1k. |
|
149 | 149 | """ |
|
150 | 150 | while chunks: |
|
151 | 151 | data = stream_obj.read(block_size) |
|
152 | 152 | if not data: |
|
153 | 153 | break |
|
154 | 154 | yield data |
|
155 | 155 | chunks -= 1 |
|
156 | 156 | |
|
157 | 157 | |
|
158 | 158 | def _is_request_chunked(environ): |
|
159 | 159 | stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked' |
|
160 | 160 | return stream |
|
161 | 161 | |
|
162 | 162 | |
|
163 | 163 | def _maybe_stream_request(environ): |
|
164 | 164 | path = environ['PATH_INFO'] |
|
165 | 165 | stream = _is_request_chunked(environ) |
|
166 | 166 | log.debug('handling request `%s` with stream support: %s', path, stream) |
|
167 | 167 | |
|
168 | 168 | if stream: |
|
169 | 169 | # set stream by 256k |
|
170 | 170 | return read_in_chunks(environ['wsgi.input'], block_size=1024 * 256) |
|
171 | 171 | else: |
|
172 | 172 | return environ['wsgi.input'].read() |
|
173 | 173 | |
|
174 | 174 | |
|
175 | 175 | def _maybe_stream_response(response): |
|
176 | 176 | """ |
|
177 | 177 | Try to generate chunks from the response if it is chunked. |
|
178 | 178 | """ |
|
179 | 179 | stream = _is_chunked(response) |
|
180 | 180 | log.debug('returning response with stream: %s', stream) |
|
181 | 181 | if stream: |
|
182 | 182 | # read in 256k Chunks |
|
183 | 183 | return response.raw.read_chunked(amt=1024 * 256) |
|
184 | 184 | else: |
|
185 | 185 | return [response.content] |
|
186 | 186 | |
|
187 | 187 | |
|
188 | 188 | def _is_chunked(response): |
|
189 | 189 | return response.headers.get('Transfer-Encoding', '') == 'chunked' |
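
A minimal sketch of the Python 3 form of the urljoin call used in VcsHttpProxy.__call__ above: the old urlparse module now lives in urllib.parse, and urljoin is a function of that module, not an attribute of urllib.parse.urlparse. The server address and repository name below are illustrative placeholders, not values taken from this changeset.

import urllib.parse

# hypothetical streaming base URL and repo name, for illustration only
base_url = 'http://vcsserver.example:9900/stream/hg/'
url = urllib.parse.urljoin(base_url, 'some/repo')
assert url == 'http://vcsserver.example:9900/stream/hg/some/repo'
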
@@ -1,284 +1,284 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import gzip |
|
22 | 22 | import shutil |
|
23 | 23 | import logging |
|
24 | 24 | import tempfile |
|
25 | import urlparse | |
|
25 | import urllib.parse | |
|
26 | 26 | |
|
27 | 27 | from webob.exc import HTTPNotFound |
|
28 | 28 | |
|
29 | 29 | import rhodecode |
|
30 | 30 | from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled |
|
31 | 31 | from rhodecode.lib.middleware.simplegit import SimpleGit, GIT_PROTO_PAT |
|
32 | 32 | from rhodecode.lib.middleware.simplehg import SimpleHg |
|
33 | 33 | from rhodecode.lib.middleware.simplesvn import SimpleSvn |
|
34 | 34 | from rhodecode.model.settings import VcsSettingsModel |
|
35 | 35 | |
|
36 | 36 | log = logging.getLogger(__name__) |
|
37 | 37 | |
|
38 | 38 | VCS_TYPE_KEY = '_rc_vcs_type' |
|
39 | 39 | VCS_TYPE_SKIP = '_rc_vcs_skip' |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | def is_git(environ): |
|
43 | 43 | """ |
|
44 | 44 | Returns True if requests should be handled by GIT wsgi middleware |
|
45 | 45 | """ |
|
46 | 46 | is_git_path = GIT_PROTO_PAT.match(environ['PATH_INFO']) |
|
47 | 47 | log.debug( |
|
48 | 48 | 'request path: `%s` detected as GIT PROTOCOL %s', environ['PATH_INFO'], |
|
49 | 49 | is_git_path is not None) |
|
50 | 50 | |
|
51 | 51 | return is_git_path |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | def is_hg(environ): |
|
55 | 55 | """ |
|
56 | 56 | Returns True if requests target is mercurial server - header |
|
57 | 57 | ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``. |
|
58 | 58 | """ |
|
59 | 59 | is_hg_path = False |
|
60 | 60 | |
|
61 | 61 | http_accept = environ.get('HTTP_ACCEPT') |
|
62 | 62 | |
|
63 | 63 | if http_accept and http_accept.startswith('application/mercurial'): |
|
64 | query = urlparse.parse_qs(environ['QUERY_STRING']) | |
|
64 | query = urllib.parse.parse_qs(environ['QUERY_STRING']) | 
|
65 | 65 | if 'cmd' in query: |
|
66 | 66 | is_hg_path = True |
|
67 | 67 | |
|
68 | 68 | log.debug( |
|
69 | 69 | 'request path: `%s` detected as HG PROTOCOL %s', environ['PATH_INFO'], |
|
70 | 70 | is_hg_path) |
|
71 | 71 | |
|
72 | 72 | return is_hg_path |
|
73 | 73 | |
|
74 | 74 | |
|
75 | 75 | def is_svn(environ): |
|
76 | 76 | """ |
|
77 | 77 | Returns True if requests target is Subversion server |
|
78 | 78 | """ |
|
79 | 79 | |
|
80 | 80 | http_dav = environ.get('HTTP_DAV', '') |
|
81 | 81 | magic_path_segment = rhodecode.CONFIG.get( |
|
82 | 82 | 'rhodecode_subversion_magic_path', '/!svn') |
|
83 | 83 | is_svn_path = ( |
|
84 | 84 | 'subversion' in http_dav or |
|
85 | 85 | magic_path_segment in environ['PATH_INFO'] |
|
86 | 86 | or environ['REQUEST_METHOD'] in ['PROPFIND', 'PROPPATCH'] |
|
87 | 87 | ) |
|
88 | 88 | log.debug( |
|
89 | 89 | 'request path: `%s` detected as SVN PROTOCOL %s', environ['PATH_INFO'], |
|
90 | 90 | is_svn_path) |
|
91 | 91 | |
|
92 | 92 | return is_svn_path |
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | class GunzipMiddleware(object): |
|
96 | 96 | """ |
|
97 | 97 | WSGI middleware that unzips gzip-encoded requests before |
|
98 | 98 | passing on to the underlying application. |
|
99 | 99 | """ |
|
100 | 100 | |
|
101 | 101 | def __init__(self, application): |
|
102 | 102 | self.app = application |
|
103 | 103 | |
|
104 | 104 | def __call__(self, environ, start_response): |
|
105 | 105 | accepts_encoding_header = environ.get('HTTP_CONTENT_ENCODING', b'') |
|
106 | 106 | |
|
107 | 107 | if b'gzip' in accepts_encoding_header: |
|
108 | 108 | log.debug('gzip detected, now running gunzip wrapper') |
|
109 | 109 | wsgi_input = environ['wsgi.input'] |
|
110 | 110 | |
|
111 | 111 | if not hasattr(environ['wsgi.input'], 'seek'): |
|
112 | 112 | # The gzip implementation in the standard library of Python 2.x |
|
113 | 113 | # requires the '.seek()' and '.tell()' methods to be available |
|
114 | 114 | # on the input stream. Read the data into a temporary file to |
|
115 | 115 | # work around this limitation. |
|
116 | 116 | |
|
117 | 117 | wsgi_input = tempfile.SpooledTemporaryFile(64 * 1024 * 1024) |
|
118 | 118 | shutil.copyfileobj(environ['wsgi.input'], wsgi_input) |
|
119 | 119 | wsgi_input.seek(0) |
|
120 | 120 | |
|
121 | 121 | environ['wsgi.input'] = gzip.GzipFile(fileobj=wsgi_input, mode='r') |
|
122 | 122 | # since we "Ungzipped" the content we say now it's no longer gzip |
|
123 | 123 | # content encoding |
|
124 | 124 | del environ['HTTP_CONTENT_ENCODING'] |
|
125 | 125 | |
|
126 | 126 | # content length has changes ? or i'm not sure |
|
127 | 127 | if 'CONTENT_LENGTH' in environ: |
|
128 | 128 | del environ['CONTENT_LENGTH'] |
|
129 | 129 | else: |
|
130 | 130 | log.debug('content not gzipped, gzipMiddleware passing ' |
|
131 | 131 | 'request further') |
|
132 | 132 | return self.app(environ, start_response) |
|
133 | 133 | |
|
134 | 134 | |
|
135 | 135 | def is_vcs_call(environ): |
|
136 | 136 | if VCS_TYPE_KEY in environ: |
|
137 | 137 | raw_type = environ[VCS_TYPE_KEY] |
|
138 | 138 | return raw_type and raw_type != VCS_TYPE_SKIP |
|
139 | 139 | return False |
|
140 | 140 | |
|
141 | 141 | |
|
142 | 142 | def get_path_elem(route_path): |
|
143 | 143 | if not route_path: |
|
144 | 144 | return None |
|
145 | 145 | |
|
146 | 146 | cleaned_route_path = route_path.lstrip('/') |
|
147 | 147 | if cleaned_route_path: |
|
148 | 148 | cleaned_route_path_elems = cleaned_route_path.split('/') |
|
149 | 149 | if cleaned_route_path_elems: |
|
150 | 150 | return cleaned_route_path_elems[0] |
|
151 | 151 | return None |
|
152 | 152 | |
|
153 | 153 | |
|
154 | 154 | def detect_vcs_request(environ, backends): |
|
155 | 155 | checks = { |
|
156 | 156 | 'hg': (is_hg, SimpleHg), |
|
157 | 157 | 'git': (is_git, SimpleGit), |
|
158 | 158 | 'svn': (is_svn, SimpleSvn), |
|
159 | 159 | } |
|
160 | 160 | handler = None |
|
161 | 161 | # List of path views first chunk we don't do any checks |
|
162 | 162 | white_list = [ |
|
163 | 163 | # e.g /_file_store/download |
|
164 | 164 | '_file_store', |
|
165 | 165 | |
|
166 | 166 | # static files no detection |
|
167 | 167 | '_static', |
|
168 | 168 | |
|
169 | 169 | # skip ops ping, status |
|
170 | 170 | '_admin/ops/ping', |
|
171 | 171 | '_admin/ops/status', |
|
172 | 172 | |
|
173 | 173 | # full channelstream connect should be VCS skipped |
|
174 | 174 | '_admin/channelstream/connect', |
|
175 | 175 | ] |
|
176 | 176 | |
|
177 | 177 | path_info = environ['PATH_INFO'] |
|
178 | 178 | |
|
179 | 179 | path_elem = get_path_elem(path_info) |
|
180 | 180 | |
|
181 | 181 | if path_elem in white_list: |
|
182 | 182 | log.debug('path `%s` in whitelist, skipping...', path_info) |
|
183 | 183 | return handler |
|
184 | 184 | |
|
185 | 185 | path_url = path_info.lstrip('/') |
|
186 | 186 | if path_url in white_list: |
|
187 | 187 | log.debug('full url path `%s` in whitelist, skipping...', path_url) |
|
188 | 188 | return handler |
|
189 | 189 | |
|
190 | 190 | if VCS_TYPE_KEY in environ: |
|
191 | 191 | raw_type = environ[VCS_TYPE_KEY] |
|
192 | 192 | if raw_type == VCS_TYPE_SKIP: |
|
193 | 193 | log.debug('got `skip` marker for vcs detection, skipping...') |
|
194 | 194 | return handler |
|
195 | 195 | |
|
196 | 196 | _check, handler = checks.get(raw_type) or [None, None] |
|
197 | 197 | if handler: |
|
198 | 198 | log.debug('got handler:%s from environ', handler) |
|
199 | 199 | |
|
200 | 200 | if not handler: |
|
201 | 201 | log.debug('request start: checking if request for `%s` is of VCS type in order: %s', path_elem, backends) |
|
202 | 202 | for vcs_type in backends: |
|
203 | 203 | vcs_check, _handler = checks[vcs_type] |
|
204 | 204 | if vcs_check(environ): |
|
205 | 205 | log.debug('vcs handler found %s', _handler) |
|
206 | 206 | handler = _handler |
|
207 | 207 | break |
|
208 | 208 | |
|
209 | 209 | return handler |
|
210 | 210 | |
|
211 | 211 | |
|
212 | 212 | class VCSMiddleware(object): |
|
213 | 213 | |
|
214 | 214 | def __init__(self, app, registry, config, appenlight_client): |
|
215 | 215 | self.application = app |
|
216 | 216 | self.registry = registry |
|
217 | 217 | self.config = config |
|
218 | 218 | self.appenlight_client = appenlight_client |
|
219 | 219 | self.use_gzip = True |
|
220 | 220 | # order in which we check the middlewares, based on vcs.backends config |
|
221 | 221 | self.check_middlewares = config['vcs.backends'] |
|
222 | 222 | |
|
223 | 223 | def vcs_config(self, repo_name=None): |
|
224 | 224 | """ |
|
225 | 225 | returns serialized VcsSettings |
|
226 | 226 | """ |
|
227 | 227 | try: |
|
228 | 228 | return VcsSettingsModel( |
|
229 | 229 | repo=repo_name).get_ui_settings_as_config_obj() |
|
230 | 230 | except Exception: |
|
231 | 231 | pass |
|
232 | 232 | |
|
233 | 233 | def wrap_in_gzip_if_enabled(self, app, config): |
|
234 | 234 | if self.use_gzip: |
|
235 | 235 | app = GunzipMiddleware(app) |
|
236 | 236 | return app |
|
237 | 237 | |
|
238 | 238 | def _get_handler_app(self, environ): |
|
239 | 239 | app = None |
|
240 | 240 | log.debug('VCSMiddleware: detecting vcs type.') |
|
241 | 241 | handler = detect_vcs_request(environ, self.check_middlewares) |
|
242 | 242 | if handler: |
|
243 | 243 | app = handler(self.config, self.registry) |
|
244 | 244 | |
|
245 | 245 | return app |
|
246 | 246 | |
|
247 | 247 | def __call__(self, environ, start_response): |
|
248 | 248 | # check if we handle one of interesting protocols, optionally extract |
|
249 | 249 | # specific vcsSettings and allow changes of how things are wrapped |
|
250 | 250 | vcs_handler = self._get_handler_app(environ) |
|
251 | 251 | if vcs_handler: |
|
252 | 252 | # translate the _REPO_ID into real repo NAME for usage |
|
253 | 253 | # in middleware |
|
254 | 254 | environ['PATH_INFO'] = vcs_handler._get_by_id(environ['PATH_INFO']) |
|
255 | 255 | |
|
256 | 256 | # Set acl, url and vcs repo names. |
|
257 | 257 | vcs_handler.set_repo_names(environ) |
|
258 | 258 | |
|
259 | 259 | # register repo config back to the handler |
|
260 | 260 | vcs_conf = self.vcs_config(vcs_handler.acl_repo_name) |
|
261 | 261 | # maybe damaged/non existent settings. We still want to |
|
262 | 262 | # pass that point to validate on is_valid_and_existing_repo |
|
263 | 263 | # and return proper HTTP Code back to client |
|
264 | 264 | if vcs_conf: |
|
265 | 265 | vcs_handler.repo_vcs_config = vcs_conf |
|
266 | 266 | |
|
267 | 267 | # check for type, presence in database and on filesystem |
|
268 | 268 | if not vcs_handler.is_valid_and_existing_repo( |
|
269 | 269 | vcs_handler.acl_repo_name, |
|
270 | 270 | vcs_handler.base_path, |
|
271 | 271 | vcs_handler.SCM): |
|
272 | 272 | return HTTPNotFound()(environ, start_response) |
|
273 | 273 | |
|
274 | 274 | environ['REPO_NAME'] = vcs_handler.url_repo_name |
|
275 | 275 | |
|
276 | 276 | # Wrap handler in middlewares if they are enabled. |
|
277 | 277 | vcs_handler = self.wrap_in_gzip_if_enabled( |
|
278 | 278 | vcs_handler, self.config) |
|
279 | 279 | vcs_handler, _ = wrap_in_appenlight_if_enabled( |
|
280 | 280 | vcs_handler, self.config, self.appenlight_client) |
|
281 | 281 | |
|
282 | 282 | return vcs_handler(environ, start_response) |
|
283 | 283 | |
|
284 | 284 | return self.application(environ, start_response) |
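
As a quick illustration of the is_hg() detection above, urllib.parse.parse_qs (the Python 3 home of the old urlparse.parse_qs) turns the raw query string into a dict keyed by parameter name; the query string below is a made-up example of a Mercurial wire-protocol request.

import urllib.parse

query = urllib.parse.parse_qs('cmd=batch&key=tip')   # {'cmd': ['batch'], 'key': ['tip']}
is_hg_path = 'cmd' in query
assert is_hg_path
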
@@ -1,412 +1,412 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Client for the VCSServer implemented based on HTTP. |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import copy |
|
26 | 26 | import logging |
|
27 | 27 | import threading |
|
28 | 28 | import time |
|
29 | 29 | import urllib.request, urllib.error, urllib.parse |
|
30 | import urlparse | |
|
30 | import urllib.parse | |
|
31 | 31 | import uuid |
|
32 | 32 | import traceback |
|
33 | 33 | |
|
34 | 34 | import pycurl |
|
35 | 35 | import msgpack |
|
36 | 36 | import requests |
|
37 | 37 | from requests.packages.urllib3.util.retry import Retry |
|
38 | 38 | |
|
39 | 39 | import rhodecode |
|
40 | 40 | from rhodecode.lib import rc_cache |
|
41 | 41 | from rhodecode.lib.rc_cache.utils import compute_key_from_params |
|
42 | 42 | from rhodecode.lib.system_info import get_cert_path |
|
43 | 43 | from rhodecode.lib.vcs import exceptions, CurlSession |
|
44 | 44 | from rhodecode.lib.utils2 import str2bool |
|
45 | 45 | |
|
46 | 46 | log = logging.getLogger(__name__) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | # TODO: mikhail: Keep it in sync with vcsserver's |
|
50 | 50 | # HTTPApplication.ALLOWED_EXCEPTIONS |
|
51 | 51 | EXCEPTIONS_MAP = { |
|
52 | 52 | 'KeyError': KeyError, |
|
53 | 53 | 'URLError': urllib.error.URLError, |
|
54 | 54 | } |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | def _remote_call(url, payload, exceptions_map, session): |
|
58 | 58 | try: |
|
59 | 59 | headers = { |
|
60 | 60 | 'X-RC-Method': payload.get('method'), |
|
61 | 61 | 'X-RC-Repo-Name': payload.get('_repo_name') |
|
62 | 62 | } |
|
63 | 63 | response = session.post(url, data=msgpack.packb(payload), headers=headers) |
|
64 | 64 | except pycurl.error as e: |
|
65 | 65 | msg = '{}. \npycurl traceback: {}'.format(e, traceback.format_exc()) |
|
66 | 66 | raise exceptions.HttpVCSCommunicationError(msg) |
|
67 | 67 | except Exception as e: |
|
68 | 68 | message = getattr(e, 'message', '') |
|
69 | 69 | if 'Failed to connect' in message: |
|
70 | 70 | # gevent doesn't return proper pycurl errors |
|
71 | 71 | raise exceptions.HttpVCSCommunicationError(e) |
|
72 | 72 | else: |
|
73 | 73 | raise |
|
74 | 74 | |
|
75 | 75 | if response.status_code >= 400: |
|
76 | 76 | log.error('Call to %s returned non 200 HTTP code: %s', |
|
77 | 77 | url, response.status_code) |
|
78 | 78 | raise exceptions.HttpVCSCommunicationError(repr(response.content)) |
|
79 | 79 | |
|
80 | 80 | try: |
|
81 | 81 | response = msgpack.unpackb(response.content) |
|
82 | 82 | except Exception: |
|
83 | 83 | log.exception('Failed to decode response %r', response.content) |
|
84 | 84 | raise |
|
85 | 85 | |
|
86 | 86 | error = response.get('error') |
|
87 | 87 | if error: |
|
88 | 88 | type_ = error.get('type', 'Exception') |
|
89 | 89 | exc = exceptions_map.get(type_, Exception) |
|
90 | 90 | exc = exc(error.get('message')) |
|
91 | 91 | try: |
|
92 | 92 | exc._vcs_kind = error['_vcs_kind'] |
|
93 | 93 | except KeyError: |
|
94 | 94 | pass |
|
95 | 95 | |
|
96 | 96 | try: |
|
97 | 97 | exc._vcs_server_traceback = error['traceback'] |
|
98 | 98 | exc._vcs_server_org_exc_name = error['org_exc'] |
|
99 | 99 | exc._vcs_server_org_exc_tb = error['org_exc_tb'] |
|
100 | 100 | except KeyError: |
|
101 | 101 | pass |
|
102 | 102 | |
|
103 | 103 | raise exc |
|
104 | 104 | return response.get('result') |
|
105 | 105 | |
|
106 | 106 | |
|
107 | 107 | def _streaming_remote_call(url, payload, exceptions_map, session, chunk_size): |
|
108 | 108 | try: |
|
109 | 109 | headers = { |
|
110 | 110 | 'X-RC-Method': payload.get('method'), |
|
111 | 111 | 'X-RC-Repo-Name': payload.get('_repo_name') |
|
112 | 112 | } |
|
113 | 113 | response = session.post(url, data=msgpack.packb(payload), headers=headers) |
|
114 | 114 | except pycurl.error as e: |
|
115 | 115 | msg = '{}. \npycurl traceback: {}'.format(e, traceback.format_exc()) |
|
116 | 116 | raise exceptions.HttpVCSCommunicationError(msg) |
|
117 | 117 | except Exception as e: |
|
118 | 118 | message = getattr(e, 'message', '') |
|
119 | 119 | if 'Failed to connect' in message: |
|
120 | 120 | # gevent doesn't return proper pycurl errors |
|
121 | 121 | raise exceptions.HttpVCSCommunicationError(e) |
|
122 | 122 | else: |
|
123 | 123 | raise |
|
124 | 124 | |
|
125 | 125 | if response.status_code >= 400: |
|
126 | 126 | log.error('Call to %s returned non 200 HTTP code: %s', |
|
127 | 127 | url, response.status_code) |
|
128 | 128 | raise exceptions.HttpVCSCommunicationError(repr(response.content)) |
|
129 | 129 | |
|
130 | 130 | return response.iter_content(chunk_size=chunk_size) |
|
131 | 131 | |
|
132 | 132 | |
|
133 | 133 | class ServiceConnection(object): |
|
134 | 134 | def __init__(self, server_and_port, backend_endpoint, session_factory): |
|
135 | self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint) | |
|
135 | self.url = urllib.parse.urljoin('http://%s' % server_and_port, backend_endpoint) | 
|
136 | 136 | self._session_factory = session_factory |
|
137 | 137 | |
|
138 | 138 | def __getattr__(self, name): |
|
139 | 139 | def f(*args, **kwargs): |
|
140 | 140 | return self._call(name, *args, **kwargs) |
|
141 | 141 | return f |
|
142 | 142 | |
|
143 | 143 | @exceptions.map_vcs_exceptions |
|
144 | 144 | def _call(self, name, *args, **kwargs): |
|
145 | 145 | payload = { |
|
146 | 146 | 'id': str(uuid.uuid4()), |
|
147 | 147 | 'method': name, |
|
148 | 148 | 'params': {'args': args, 'kwargs': kwargs} |
|
149 | 149 | } |
|
150 | 150 | return _remote_call( |
|
151 | 151 | self.url, payload, EXCEPTIONS_MAP, self._session_factory()) |
|
152 | 152 | |
|
153 | 153 | |
|
154 | 154 | class RemoteVCSMaker(object): |
|
155 | 155 | |
|
156 | 156 | def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory): |
|
157 | self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint) | |
|
158 | self.stream_url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint+'/stream') | |
|
157 | self.url = urllib.parse.urljoin('http://%s' % server_and_port, backend_endpoint) | 
|
158 | self.stream_url = urllib.parse.urljoin('http://%s' % server_and_port, backend_endpoint+'/stream') | 
|
159 | 159 | |
|
160 | 160 | self._session_factory = session_factory |
|
161 | 161 | self.backend_type = backend_type |
|
162 | 162 | |
|
163 | 163 | @classmethod |
|
164 | 164 | def init_cache_region(cls, repo_id): |
|
165 | 165 | cache_namespace_uid = 'cache_repo.{}'.format(repo_id) |
|
166 | 166 | region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) |
|
167 | 167 | return region, cache_namespace_uid |
|
168 | 168 | |
|
169 | 169 | def __call__(self, path, repo_id, config, with_wire=None): |
|
170 | 170 | log.debug('%s RepoMaker call on %s', self.backend_type.upper(), path) |
|
171 | 171 | return RemoteRepo(path, repo_id, config, self, with_wire=with_wire) |
|
172 | 172 | |
|
173 | 173 | def __getattr__(self, name): |
|
174 | 174 | def remote_attr(*args, **kwargs): |
|
175 | 175 | return self._call(name, *args, **kwargs) |
|
176 | 176 | return remote_attr |
|
177 | 177 | |
|
178 | 178 | @exceptions.map_vcs_exceptions |
|
179 | 179 | def _call(self, func_name, *args, **kwargs): |
|
180 | 180 | payload = { |
|
181 | 181 | 'id': str(uuid.uuid4()), |
|
182 | 182 | 'method': func_name, |
|
183 | 183 | 'backend': self.backend_type, |
|
184 | 184 | 'params': {'args': args, 'kwargs': kwargs} |
|
185 | 185 | } |
|
186 | 186 | url = self.url |
|
187 | 187 | return _remote_call(url, payload, EXCEPTIONS_MAP, self._session_factory()) |
|
188 | 188 | |
|
189 | 189 | |
|
190 | 190 | class RemoteRepo(object): |
|
191 | 191 | CHUNK_SIZE = 16384 |
|
192 | 192 | |
|
193 | 193 | def __init__(self, path, repo_id, config, remote_maker, with_wire=None): |
|
194 | 194 | self.url = remote_maker.url |
|
195 | 195 | self.stream_url = remote_maker.stream_url |
|
196 | 196 | self._session = remote_maker._session_factory() |
|
197 | 197 | |
|
198 | 198 | cache_repo_id = self._repo_id_sanitizer(repo_id) |
|
199 | 199 | _repo_name = self._get_repo_name(config, path) |
|
200 | 200 | self._cache_region, self._cache_namespace = \ |
|
201 | 201 | remote_maker.init_cache_region(cache_repo_id) |
|
202 | 202 | |
|
203 | 203 | with_wire = with_wire or {} |
|
204 | 204 | |
|
205 | 205 | repo_state_uid = with_wire.get('repo_state_uid') or 'state' |
|
206 | 206 | |
|
207 | 207 | self._wire = { |
|
208 | 208 | "_repo_name": _repo_name, |
|
209 | 209 | "path": path, # repo path |
|
210 | 210 | "repo_id": repo_id, |
|
211 | 211 | "cache_repo_id": cache_repo_id, |
|
212 | 212 | "config": config, |
|
213 | 213 | "repo_state_uid": repo_state_uid, |
|
214 | 214 | "context": self._create_vcs_cache_context(path, repo_state_uid) |
|
215 | 215 | } |
|
216 | 216 | |
|
217 | 217 | if with_wire: |
|
218 | 218 | self._wire.update(with_wire) |
|
219 | 219 | |
|
220 | 220 | # NOTE(johbo): Trading complexity for performance. Avoiding the call to |
|
221 | 221 | # log.debug brings a few percent gain even if is is not active. |
|
222 | 222 | if log.isEnabledFor(logging.DEBUG): |
|
223 | 223 | self._call_with_logging = True |
|
224 | 224 | |
|
225 | 225 | self.cert_dir = get_cert_path(rhodecode.CONFIG.get('__file__')) |
|
226 | 226 | |
|
227 | 227 | def _get_repo_name(self, config, path): |
|
228 | 228 | repo_store = config.get('paths', '/') |
|
229 | 229 | return path.split(repo_store)[-1].lstrip('/') |
|
230 | 230 | |
|
231 | 231 | def _repo_id_sanitizer(self, repo_id): |
|
232 | 232 | pathless = repo_id.replace('/', '__').replace('-', '_') |
|
233 | 233 | return ''.join(char if ord(char) < 128 else '_{}_'.format(ord(char)) for char in pathless) |
|
234 | 234 | |
|
235 | 235 | def __getattr__(self, name): |
|
236 | 236 | |
|
237 | 237 | if name.startswith('stream:'): |
|
238 | 238 | def repo_remote_attr(*args, **kwargs): |
|
239 | 239 | return self._call_stream(name, *args, **kwargs) |
|
240 | 240 | else: |
|
241 | 241 | def repo_remote_attr(*args, **kwargs): |
|
242 | 242 | return self._call(name, *args, **kwargs) |
|
243 | 243 | |
|
244 | 244 | return repo_remote_attr |
|
245 | 245 | |
|
246 | 246 | def _base_call(self, name, *args, **kwargs): |
|
247 | 247 | # TODO: oliver: This is currently necessary pre-call since the |
|
248 | 248 | # config object is being changed for hooking scenarios |
|
249 | 249 | wire = copy.deepcopy(self._wire) |
|
250 | 250 | wire["config"] = wire["config"].serialize() |
|
251 | 251 | wire["config"].append(('vcs', 'ssl_dir', self.cert_dir)) |
|
252 | 252 | |
|
253 | 253 | payload = { |
|
254 | 254 | 'id': str(uuid.uuid4()), |
|
255 | 255 | 'method': name, |
|
256 | 256 | "_repo_name": wire['_repo_name'], |
|
257 | 257 | 'params': {'wire': wire, 'args': args, 'kwargs': kwargs} |
|
258 | 258 | } |
|
259 | 259 | |
|
260 | 260 | context_uid = wire.get('context') |
|
261 | 261 | return context_uid, payload |
|
262 | 262 | |
|
263 | 263 | def get_local_cache(self, name, args): |
|
264 | 264 | cache_on = False |
|
265 | 265 | cache_key = '' |
|
266 | 266 | local_cache_on = str2bool(rhodecode.CONFIG.get('vcs.methods.cache')) |
|
267 | 267 | |
|
268 | 268 | cache_methods = [ |
|
269 | 269 | 'branches', 'tags', 'bookmarks', |
|
270 | 270 | 'is_large_file', 'is_binary', |
|
271 | 271 | 'fctx_size', 'stream:fctx_node_data', 'blob_raw_length', |
|
272 | 272 | 'node_history', |
|
273 | 273 | 'revision', 'tree_items', |
|
274 | 274 | 'ctx_list', 'ctx_branch', 'ctx_description', |
|
275 | 275 | 'bulk_request', |
|
276 | 276 | 'assert_correct_path' |
|
277 | 277 | ] |
|
278 | 278 | |
|
279 | 279 | if local_cache_on and name in cache_methods: |
|
280 | 280 | cache_on = True |
|
281 | 281 | repo_state_uid = self._wire['repo_state_uid'] |
|
282 | 282 | call_args = [a for a in args] |
|
283 | 283 | cache_key = compute_key_from_params(repo_state_uid, name, *call_args) |
|
284 | 284 | |
|
285 | 285 | return cache_on, cache_key |
|
286 | 286 | |
|
287 | 287 | @exceptions.map_vcs_exceptions |
|
288 | 288 | def _call(self, name, *args, **kwargs): |
|
289 | 289 | context_uid, payload = self._base_call(name, *args, **kwargs) |
|
290 | 290 | url = self.url |
|
291 | 291 | |
|
292 | 292 | start = time.time() |
|
293 | 293 | cache_on, cache_key = self.get_local_cache(name, args) |
|
294 | 294 | |
|
295 | 295 | @self._cache_region.conditional_cache_on_arguments( |
|
296 | 296 | namespace=self._cache_namespace, condition=cache_on and cache_key) |
|
297 | 297 | def remote_call(_cache_key): |
|
298 | 298 | if self._call_with_logging: |
|
299 | 299 | log.debug('Calling %s@%s with args:%.10240r. wire_context: %s cache_on: %s', |
|
300 | 300 | url, name, args, context_uid, cache_on) |
|
301 | 301 | return _remote_call(url, payload, EXCEPTIONS_MAP, self._session) |
|
302 | 302 | |
|
303 | 303 | result = remote_call(cache_key) |
|
304 | 304 | if self._call_with_logging: |
|
305 | 305 | log.debug('Call %s@%s took: %.4fs. wire_context: %s', |
|
306 | 306 | url, name, time.time()-start, context_uid) |
|
307 | 307 | return result |
|
308 | 308 | |
|
309 | 309 | @exceptions.map_vcs_exceptions |
|
310 | 310 | def _call_stream(self, name, *args, **kwargs): |
|
311 | 311 | context_uid, payload = self._base_call(name, *args, **kwargs) |
|
312 | 312 | payload['chunk_size'] = self.CHUNK_SIZE |
|
313 | 313 | url = self.stream_url |
|
314 | 314 | |
|
315 | 315 | start = time.time() |
|
316 | 316 | cache_on, cache_key = self.get_local_cache(name, args) |
|
317 | 317 | |
|
318 | 318 | # Cache is a problem because this is a stream |
|
319 | 319 | def streaming_remote_call(_cache_key): |
|
320 | 320 | if self._call_with_logging: |
|
321 | 321 | log.debug('Calling %s@%s with args:%.10240r. wire_context: %s cache_on: %s', |
|
322 | 322 | url, name, args, context_uid, cache_on) |
|
323 | 323 | return _streaming_remote_call(url, payload, EXCEPTIONS_MAP, self._session, self.CHUNK_SIZE) |
|
324 | 324 | |
|
325 | 325 | result = streaming_remote_call(cache_key) |
|
326 | 326 | if self._call_with_logging: |
|
327 | 327 | log.debug('Call %s@%s took: %.4fs. wire_context: %s', |
|
328 | 328 | url, name, time.time()-start, context_uid) |
|
329 | 329 | return result |
|
330 | 330 | |
|
331 | 331 | def __getitem__(self, key): |
|
332 | 332 | return self.revision(key) |
|
333 | 333 | |
|
334 | 334 | def _create_vcs_cache_context(self, *args): |
|
335 | 335 | """ |
|
336 | 336 | Creates a unique string which is passed to the VCSServer on every |
|
337 | 337 | remote call. It is used as cache key in the VCSServer. |
|
338 | 338 | """ |
|
339 | 339 | hash_key = '-'.join(map(str, args)) |
|
340 | 340 | return str(uuid.uuid5(uuid.NAMESPACE_URL, hash_key)) |
|
341 | 341 | |
|
342 | 342 | def invalidate_vcs_cache(self): |
|
343 | 343 | """ |
|
344 | 344 | This invalidates the context which is sent to the VCSServer on every |
|
345 | 345 | call to a remote method. It forces the VCSServer to create a fresh |
|
346 | 346 | repository instance on the next call to a remote method. |
|
347 | 347 | """ |
|
348 | 348 | self._wire['context'] = str(uuid.uuid4()) |
|
349 | 349 | |
|
350 | 350 | |
|
351 | 351 | class VcsHttpProxy(object): |
|
352 | 352 | |
|
353 | 353 | CHUNK_SIZE = 16384 |
|
354 | 354 | |
|
355 | 355 | def __init__(self, server_and_port, backend_endpoint): |
|
356 | 356 | retries = Retry(total=5, connect=None, read=None, redirect=None) |
|
357 | 357 | |
|
358 | 358 | adapter = requests.adapters.HTTPAdapter(max_retries=retries) |
|
359 | self.base_url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint) | |
|
359 | self.base_url = urllib.parse.urljoin('http://%s' % server_and_port, backend_endpoint) | 
|
360 | 360 | self.session = requests.Session() |
|
361 | 361 | self.session.mount('http://', adapter) |
|
362 | 362 | |
|
363 | 363 | def handle(self, environment, input_data, *args, **kwargs): |
|
364 | 364 | data = { |
|
365 | 365 | 'environment': environment, |
|
366 | 366 | 'input_data': input_data, |
|
367 | 367 | 'args': args, |
|
368 | 368 | 'kwargs': kwargs |
|
369 | 369 | } |
|
370 | 370 | result = self.session.post( |
|
371 | 371 | self.base_url, msgpack.packb(data), stream=True) |
|
372 | 372 | return self._get_result(result) |
|
373 | 373 | |
|
374 | 374 | def _deserialize_and_raise(self, error): |
|
375 | 375 | exception = Exception(error['message']) |
|
376 | 376 | try: |
|
377 | 377 | exception._vcs_kind = error['_vcs_kind'] |
|
378 | 378 | except KeyError: |
|
379 | 379 | pass |
|
380 | 380 | raise exception |
|
381 | 381 | |
|
382 | 382 | def _iterate(self, result): |
|
383 | 383 | unpacker = msgpack.Unpacker() |
|
384 | 384 | for line in result.iter_content(chunk_size=self.CHUNK_SIZE): |
|
385 | 385 | unpacker.feed(line) |
|
386 | 386 | for chunk in unpacker: |
|
387 | 387 | yield chunk |
|
388 | 388 | |
|
389 | 389 | def _get_result(self, result): |
|
390 | 390 | iterator = self._iterate(result) |
|
391 | 391 | error = iterator.next() |
|
392 | 392 | if error: |
|
393 | 393 | self._deserialize_and_raise(error) |
|
394 | 394 | |
|
395 | 395 | status = iterator.next() |
|
396 | 396 | headers = iterator.next() |
|
397 | 397 | |
|
398 | 398 | return iterator, status, headers |
|
399 | 399 | |
|
400 | 400 | |
|
401 | 401 | class ThreadlocalSessionFactory(object): |
|
402 | 402 | """ |
|
403 | 403 | Creates one CurlSession per thread on demand. |
|
404 | 404 | """ |
|
405 | 405 | |
|
406 | 406 | def __init__(self): |
|
407 | 407 | self._thread_local = threading.local() |
|
408 | 408 | |
|
409 | 409 | def __call__(self): |
|
410 | 410 | if not hasattr(self._thread_local, 'curl_session'): |
|
411 | 411 | self._thread_local.curl_session = CurlSession() |
|
412 | 412 | return self._thread_local.curl_session |
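
For reference, a small self-contained sketch of the payload shape that ServiceConnection._call and RemoteVCSMaker._call assemble before posting to the VCSServer; the method name 'branches' is only an example, and the msgpack round trip mirrors what _remote_call does with response.content.

import uuid

import msgpack

payload = {
    'id': str(uuid.uuid4()),
    'method': 'branches',            # example method name, not tied to this changeset
    'params': {'args': (), 'kwargs': {}},
}
body = msgpack.packb(payload)        # bytes body handed to session.post(url, data=body, ...)
decoded = msgpack.unpackb(body)      # with msgpack >= 1.0 this yields string keys again
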
@@ -1,59 +1,59 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | |
|
23 | 23 | import deform.widget |
|
24 | from deform.widget import null, OptGroup, string_types | 

24 | from deform.widget import null, OptGroup | 
|
25 | 25 | |
|
26 | 26 | log = logging.getLogger(__name__) |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | def _normalize_choices(values): |
|
30 | 30 | result = [] |
|
31 | 31 | for item in values: |
|
32 | 32 | if isinstance(item, OptGroup): |
|
33 | 33 | normalized_options = _normalize_choices(item.options) |
|
34 | 34 | result.append(OptGroup(item.label, *normalized_options)) |
|
35 | 35 | else: |
|
36 | 36 | value, description, help_block = item |
|
37 | if not isinstance(value, string_types): | 

37 | if not isinstance(value, str): | 
|
38 | 38 | value = str(value) |
|
39 | 39 | result.append((value, description, help_block)) |
|
40 | 40 | return result |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | class CodeMirrorWidget(deform.widget.TextAreaWidget): |
|
44 | 44 | template = 'codemirror' |
|
45 | 45 | requirements = (('deform', None), ('codemirror', None)) |
|
46 | 46 | |
|
47 | 47 | |
|
48 | 48 | class CheckboxChoiceWidgetDesc(deform.widget.CheckboxChoiceWidget): |
|
49 | 49 | template = "checkbox_choice_desc" |
|
50 | 50 | |
|
51 | 51 | def serialize(self, field, cstruct, **kw): |
|
52 | 52 | if cstruct in (null, None): |
|
53 | 53 | cstruct = () |
|
54 | 54 | readonly = kw.get("readonly", self.readonly) |
|
55 | 55 | values = kw.get("values", self.values) |
|
56 | 56 | kw["values"] = _normalize_choices(values) |
|
57 | 57 | template = readonly and self.readonly_template or self.template |
|
58 | 58 | tmpl_values = self.get_template_values(field, cstruct, kw) |
|
59 | 59 | return field.renderer(template, **tmpl_values) |
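
The str coercion that _normalize_choices applies can be shown in isolation; the choice tuples below are made up and only mimic the (value, description, help_block) triples handled above.

# made-up (value, description, help_block) triples
values = [(1, 'One', 'first option'), ('two', 'Two', 'second option')]
normalized = [
    (value if isinstance(value, str) else str(value), description, help_block)
    for value, description, help_block in values
]
assert normalized[0][0] == '1' and normalized[1][0] == 'two'
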
@@ -1,140 +1,140 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pytest |
|
22 | import urlparse | |
|
22 | import urllib.parse | |
|
23 | 23 | import mock |
|
24 | 24 | import simplejson as json |
|
25 | 25 | |
|
26 | 26 | from rhodecode.lib.vcs.backends.base import Config |
|
27 | 27 | from rhodecode.tests.lib.middleware import mock_scm_app |
|
28 | 28 | import rhodecode.lib.middleware.simplegit as simplegit |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | def get_environ(url, request_method): |
|
32 | 32 | """Construct a minimum WSGI environ based on the URL.""" |
|
33 | parsed_url = urlparse.urlparse(url) | |
|
33 | parsed_url = urllib.parse.urlparse(url) | 
|
34 | 34 | environ = { |
|
35 | 35 | 'PATH_INFO': parsed_url.path, |
|
36 | 36 | 'QUERY_STRING': parsed_url.query, |
|
37 | 37 | 'REQUEST_METHOD': request_method, |
|
38 | 38 | } |
|
39 | 39 | |
|
40 | 40 | return environ |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | @pytest.mark.parametrize( |
|
44 | 44 | 'url, expected_action, request_method', |
|
45 | 45 | [ |
|
46 | 46 | ('/foo/bar/info/refs?service=git-upload-pack', 'pull', 'GET'), |
|
47 | 47 | ('/foo/bar/info/refs?service=git-receive-pack', 'push', 'GET'), |
|
48 | 48 | ('/foo/bar/git-upload-pack', 'pull', 'GET'), |
|
49 | 49 | ('/foo/bar/git-receive-pack', 'push', 'GET'), |
|
50 | 50 | # Edge case: missing data for info/refs |
|
51 | 51 | ('/foo/info/refs?service=', 'pull', 'GET'), |
|
52 | 52 | ('/foo/info/refs', 'pull', 'GET'), |
|
53 | 53 | # Edge case: git command comes with service argument |
|
54 | 54 | ('/foo/git-upload-pack?service=git-receive-pack', 'pull', 'GET'), |
|
55 | 55 | ('/foo/git-receive-pack?service=git-upload-pack', 'push', 'GET'), |
|
56 | 56 | # Edge case: repo name conflicts with git commands |
|
57 | 57 | ('/git-receive-pack/git-upload-pack', 'pull', 'GET'), |
|
58 | 58 | ('/git-receive-pack/git-receive-pack', 'push', 'GET'), |
|
59 | 59 | ('/git-upload-pack/git-upload-pack', 'pull', 'GET'), |
|
60 | 60 | ('/git-upload-pack/git-receive-pack', 'push', 'GET'), |
|
61 | 61 | ('/foo/git-receive-pack', 'push', 'GET'), |
|
62 | 62 | # Edge case: not a smart protocol url |
|
63 | 63 | ('/foo/bar', 'pull', 'GET'), |
|
64 | 64 | # GIT LFS cases, batch |
|
65 | 65 | ('/foo/bar/info/lfs/objects/batch', 'push', 'GET'), |
|
66 | 66 | ('/foo/bar/info/lfs/objects/batch', 'pull', 'POST'), |
|
67 | 67 | # GIT LFS oid, dl/upl |
|
68 | 68 | ('/foo/bar/info/lfs/abcdeabcde', 'pull', 'GET'), |
|
69 | 69 | ('/foo/bar/info/lfs/abcdeabcde', 'push', 'PUT'), |
|
70 | 70 | ('/foo/bar/info/lfs/abcdeabcde', 'push', 'POST'), |
|
71 | 71 | # Edge case: repo name conflicts with git commands |
|
72 | 72 | ('/info/lfs/info/lfs/objects/batch', 'push', 'GET'), |
|
73 | 73 | ('/info/lfs/info/lfs/objects/batch', 'pull', 'POST'), |
|
74 | 74 | |
|
75 | 75 | ]) |
|
76 | 76 | def test_get_action(url, expected_action, request_method, baseapp, request_stub): |
|
77 | 77 | app = simplegit.SimpleGit(config={'auth_ret_code': '', 'base_path': ''}, |
|
78 | 78 | registry=request_stub.registry) |
|
79 | 79 | assert expected_action == app._get_action(get_environ(url, request_method)) |
|
80 | 80 | |
|
81 | 81 | |
|
82 | 82 | @pytest.mark.parametrize( |
|
83 | 83 | 'url, expected_repo_name, request_method', |
|
84 | 84 | [ |
|
85 | 85 | ('/foo/info/refs?service=git-upload-pack', 'foo', 'GET'), |
|
86 | 86 | ('/foo/bar/info/refs?service=git-receive-pack', 'foo/bar', 'GET'), |
|
87 | 87 | ('/foo/git-upload-pack', 'foo', 'GET'), |
|
88 | 88 | ('/foo/git-receive-pack', 'foo', 'GET'), |
|
89 | 89 | ('/foo/bar/git-upload-pack', 'foo/bar', 'GET'), |
|
90 | 90 | ('/foo/bar/git-receive-pack', 'foo/bar', 'GET'), |
|
91 | 91 | |
|
92 | 92 | # GIT LFS cases, batch |
|
93 | 93 | ('/foo/bar/info/lfs/objects/batch', 'foo/bar', 'GET'), |
|
94 | 94 | ('/example-git/info/lfs/objects/batch', 'example-git', 'POST'), |
|
95 | 95 | # GIT LFS oid, dl/upl |
|
96 | 96 | ('/foo/info/lfs/abcdeabcde', 'foo', 'GET'), |
|
97 | 97 | ('/foo/bar/info/lfs/abcdeabcde', 'foo/bar', 'PUT'), |
|
98 | 98 | ('/my-git-repo/info/lfs/abcdeabcde', 'my-git-repo', 'POST'), |
|
99 | 99 | # Edge case: repo name conflicts with git commands |
|
100 | 100 | ('/info/lfs/info/lfs/objects/batch', 'info/lfs', 'GET'), |
|
101 | 101 | ('/info/lfs/info/lfs/objects/batch', 'info/lfs', 'POST'), |
|
102 | 102 | |
|
103 | 103 | ]) |
|
104 | 104 | def test_get_repository_name(url, expected_repo_name, request_method, baseapp, request_stub): |
|
105 | 105 | app = simplegit.SimpleGit(config={'auth_ret_code': '', 'base_path': ''}, |
|
106 | 106 | registry=request_stub.registry) |
|
107 | 107 | assert expected_repo_name == app._get_repository_name( |
|
108 | 108 | get_environ(url, request_method)) |
|
109 | 109 | |
|
110 | 110 | |
|
111 | 111 | def test_get_config(user_util, baseapp, request_stub): |
|
112 | 112 | repo = user_util.create_repo(repo_type='git') |
|
113 | 113 | app = simplegit.SimpleGit(config={'auth_ret_code': '', 'base_path': ''}, |
|
114 | 114 | registry=request_stub.registry) |
|
115 | 115 | extras = {'foo': 'FOO', 'bar': 'BAR'} |
|
116 | 116 | |
|
117 | 117 | # We copy the extras as the method below will change the contents. |
|
118 | 118 | git_config = app._create_config(dict(extras), repo_name=repo.repo_name) |
|
119 | 119 | |
|
120 | 120 | expected_config = dict(extras) |
|
121 | 121 | expected_config.update({ |
|
122 | 122 | 'git_update_server_info': False, |
|
123 | 123 | 'git_lfs_enabled': False, |
|
124 | 124 | 'git_lfs_store_path': git_config['git_lfs_store_path'], |
|
125 | 125 | 'git_lfs_http_scheme': 'http' |
|
126 | 126 | }) |
|
127 | 127 | |
|
128 | 128 | assert git_config == expected_config |
|
129 | 129 | |
|
130 | 130 | |
|
131 | 131 | def test_create_wsgi_app_uses_scm_app_from_simplevcs(baseapp, request_stub): |
|
132 | 132 | config = { |
|
133 | 133 | 'auth_ret_code': '', |
|
134 | 134 | 'base_path': '', |
|
135 | 135 | 'vcs.scm_app_implementation': |
|
136 | 136 | 'rhodecode.tests.lib.middleware.mock_scm_app', |
|
137 | 137 | } |
|
138 | 138 | app = simplegit.SimpleGit(config=config, registry=request_stub.registry) |
|
139 | 139 | wsgi_app = app._create_wsgi_app('/tmp/test', 'test_repo', {}) |
|
140 | 140 | assert wsgi_app is mock_scm_app.mock_git_wsgi |
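
The get_environ() helper in this test module splits a URL with urllib.parse.urlparse; a standalone sketch using one of the parametrized URLs from above:

import urllib.parse

parsed = urllib.parse.urlparse('/foo/bar/info/refs?service=git-upload-pack')
environ = {
    'PATH_INFO': parsed.path,          # '/foo/bar/info/refs'
    'QUERY_STRING': parsed.query,      # 'service=git-upload-pack'
    'REQUEST_METHOD': 'GET',
}
assert environ['PATH_INFO'] == '/foo/bar/info/refs'
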
@@ -1,156 +1,156 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | import urlparse | |
|
21 | import urllib.parse | |
|
22 | 22 | |
|
23 | 23 | import mock |
|
24 | 24 | import pytest |
|
25 | 25 | import simplejson as json |
|
26 | 26 | |
|
27 | 27 | from rhodecode.lib.vcs.backends.base import Config |
|
28 | 28 | from rhodecode.tests.lib.middleware import mock_scm_app |
|
29 | 29 | import rhodecode.lib.middleware.simplehg as simplehg |
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | def get_environ(url): |
|
33 | 33 | """Construct a minimum WSGI environ based on the URL.""" |
|
34 | parsed_url = urlparse.urlparse(url) | |
|
34 | parsed_url = urllib.parse.urlparse(url) | 
|
35 | 35 | environ = { |
|
36 | 36 | 'PATH_INFO': parsed_url.path, |
|
37 | 37 | 'QUERY_STRING': parsed_url.query, |
|
38 | 38 | } |
|
39 | 39 | |
|
40 | 40 | return environ |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | @pytest.mark.parametrize( |
|
44 | 44 | 'url, expected_action', |
|
45 | 45 | [ |
|
46 | 46 | ('/foo/bar?cmd=unbundle&key=tip', 'push'), |
|
47 | 47 | ('/foo/bar?cmd=pushkey&key=tip', 'push'), |
|
48 | 48 | ('/foo/bar?cmd=listkeys&key=tip', 'pull'), |
|
49 | 49 | ('/foo/bar?cmd=changegroup&key=tip', 'pull'), |
|
50 | 50 | ('/foo/bar?cmd=hello', 'pull'), |
|
51 | 51 | ('/foo/bar?cmd=batch', 'push'), |
|
52 | 52 | ('/foo/bar?cmd=putlfile', 'push'), |
|
53 | 53 | # Edge case: unknown argument: assume push |
|
54 | 54 | ('/foo/bar?cmd=unknown&key=tip', 'push'), |
|
55 | 55 | ('/foo/bar?cmd=&key=tip', 'push'), |
|
56 | 56 | # Edge case: not cmd argument |
|
57 | 57 | ('/foo/bar?key=tip', 'push'), |
|
58 | 58 | ]) |
|
59 | 59 | def test_get_action(url, expected_action, request_stub): |
|
60 | 60 | app = simplehg.SimpleHg(config={'auth_ret_code': '', 'base_path': ''}, |
|
61 | 61 | registry=request_stub.registry) |
|
62 | 62 | assert expected_action == app._get_action(get_environ(url)) |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | @pytest.mark.parametrize( |
|
66 | 66 | 'environ, expected_xargs, expected_batch', |
|
67 | 67 | [ |
|
68 | 68 | ({}, |
|
69 | 69 | [''], ['push']), |
|
70 | 70 | |
|
71 | 71 | ({'HTTP_X_HGARG_1': ''}, |
|
72 | 72 | [''], ['push']), |
|
73 | 73 | |
|
74 | 74 | ({'HTTP_X_HGARG_1': 'cmds=listkeys+namespace%3Dphases'}, |
|
75 | 75 | ['listkeys namespace=phases'], ['pull']), |
|
76 | 76 | |
|
77 | 77 | ({'HTTP_X_HGARG_1': 'cmds=pushkey+namespace%3Dbookmarks%2Ckey%3Dbm%2Cold%3D%2Cnew%3Dcb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b'}, |
|
78 | 78 | ['pushkey namespace=bookmarks,key=bm,old=,new=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b'], ['push']), |
|
79 | 79 | |
|
80 | 80 | ({'HTTP_X_HGARG_1': 'namespace=phases'}, |
|
81 | 81 | ['namespace=phases'], ['push']), |
|
82 | 82 | |
|
83 | 83 | ]) |
|
84 | 84 | def test_xarg_and_batch_commands(environ, expected_xargs, expected_batch): |
|
85 | 85 | app = simplehg.SimpleHg |
|
86 | 86 | |
|
87 | 87 | result = app._get_xarg_headers(environ) |
|
88 | 88 | result_batch = app._get_batch_cmd(environ) |
|
89 | 89 | assert expected_xargs == result |
|
90 | 90 | assert expected_batch == result_batch |
|
91 | 91 | |
|
92 | 92 | |
|
93 | 93 | @pytest.mark.parametrize( |
|
94 | 94 | 'url, expected_repo_name', |
|
95 | 95 | [ |
|
96 | 96 | ('/foo?cmd=unbundle&key=tip', 'foo'), |
|
97 | 97 | ('/foo/bar?cmd=pushkey&key=tip', 'foo/bar'), |
|
98 | 98 | ('/foo/bar/baz?cmd=listkeys&key=tip', 'foo/bar/baz'), |
|
99 | 99 | # Repos with trailing slashes. |
|
100 | 100 | ('/foo/?cmd=unbundle&key=tip', 'foo'), |
|
101 | 101 | ('/foo/bar/?cmd=pushkey&key=tip', 'foo/bar'), |
|
102 | 102 | ('/foo/bar/baz/?cmd=listkeys&key=tip', 'foo/bar/baz'), |
|
103 | 103 | ]) |
|
104 | 104 | def test_get_repository_name(url, expected_repo_name, request_stub): |
|
105 | 105 | app = simplehg.SimpleHg(config={'auth_ret_code': '', 'base_path': ''}, |
|
106 | 106 | registry=request_stub.registry) |
|
107 | 107 | assert expected_repo_name == app._get_repository_name(get_environ(url)) |
|
108 | 108 | |
|
109 | 109 | |
|
110 | 110 | def test_get_config(user_util, baseapp, request_stub): |
|
111 | 111 | repo = user_util.create_repo(repo_type='git') |
|
112 | 112 | app = simplehg.SimpleHg(config={'auth_ret_code': '', 'base_path': ''}, |
|
113 | 113 | registry=request_stub.registry) |
|
114 | 114 | extras = [('foo', 'FOO', 'bar', 'BAR')] |
|
115 | 115 | |
|
116 | 116 | hg_config = app._create_config(extras, repo_name=repo.repo_name) |
|
117 | 117 | |
|
118 | 118 | config = simplehg.utils.make_db_config(repo=repo.repo_name) |
|
119 | 119 | config.set('rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
120 | 120 | hg_config_org = config |
|
121 | 121 | |
|
122 | 122 | expected_config = [ |
|
123 | 123 | ('vcs_svn_tag', 'ff89f8c714d135d865f44b90e5413b88de19a55f', '/tags/*'), |
|
124 | 124 | ('web', 'push_ssl', 'False'), |
|
125 | 125 | ('web', 'allow_push', '*'), |
|
126 | 126 | ('web', 'allow_archive', 'gz zip bz2'), |
|
127 | 127 | ('web', 'baseurl', '/'), |
|
128 | 128 | ('vcs_git_lfs', 'store_location', hg_config_org.get('vcs_git_lfs', 'store_location')), |
|
129 | 129 | ('vcs_svn_branch', '9aac1a38c3b8a0cdc4ae0f960a5f83332bc4fa5e', '/branches/*'), |
|
130 | 130 | ('vcs_svn_branch', 'c7e6a611c87da06529fd0dd733308481d67c71a8', '/trunk'), |
|
131 | 131 | ('largefiles', 'usercache', hg_config_org.get('largefiles', 'usercache')), |
|
132 | 132 | ('hooks', 'preoutgoing.pre_pull', 'python:vcsserver.hooks.pre_pull'), |
|
133 | 133 | ('hooks', 'prechangegroup.pre_push', 'python:vcsserver.hooks.pre_push'), |
|
134 | 134 | ('hooks', 'outgoing.pull_logger', 'python:vcsserver.hooks.log_pull_action'), |
|
135 | 135 | ('hooks', 'pretxnchangegroup.pre_push', 'python:vcsserver.hooks.pre_push'), |
|
136 | 136 | ('hooks', 'changegroup.push_logger', 'python:vcsserver.hooks.log_push_action'), |
|
137 | 137 | ('hooks', 'changegroup.repo_size', 'python:vcsserver.hooks.repo_size'), |
|
138 | 138 | ('phases', 'publish', 'True'), |
|
139 | 139 | ('extensions', 'largefiles', ''), |
|
140 | 140 | ('paths', '/', hg_config_org.get('paths', '/')), |
|
141 | 141 | ('rhodecode', 'RC_SCM_DATA', '[["foo", "FOO", "bar", "BAR"]]') |
|
142 | 142 | ] |
|
143 | 143 | for entry in expected_config: |
|
144 | 144 | assert entry in hg_config |
|
145 | 145 | |
|
146 | 146 | |
|
147 | 147 | def test_create_wsgi_app_uses_scm_app_from_simplevcs(request_stub): |
|
148 | 148 | config = { |
|
149 | 149 | 'auth_ret_code': '', |
|
150 | 150 | 'base_path': '', |
|
151 | 151 | 'vcs.scm_app_implementation': |
|
152 | 152 | 'rhodecode.tests.lib.middleware.mock_scm_app', |
|
153 | 153 | } |
|
154 | 154 | app = simplehg.SimpleHg(config=config, registry=request_stub.registry) |
|
155 | 155 | wsgi_app = app._create_wsgi_app('/tmp/test', 'test_repo', {}) |
|
156 | 156 | assert wsgi_app is mock_scm_app.mock_hg_wsgi |
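
The X-Hgarg values exercised by test_xarg_and_batch_commands above are URL-encoded; decoding one of them with urllib.parse shows the plain command string (the expected_xargs entries in the test carry this value without the leading 'cmds=' prefix).

import urllib.parse

raw = 'cmds=listkeys+namespace%3Dphases'      # taken from the parametrized cases above
decoded = urllib.parse.unquote_plus(raw)
assert decoded == 'cmds=listkeys namespace=phases'
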
@@ -1,468 +1,468 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import threading |
|
22 | 22 | import time |
|
23 | 23 | import logging |
|
24 | 24 | import os.path |
|
25 | 25 | import subprocess32 |
|
26 | 26 | import tempfile |
|
27 | 27 | import urllib.request, urllib.error, urllib.parse |
|
28 | 28 | from lxml.html import fromstring, tostring |
|
29 | 29 | from lxml.cssselect import CSSSelector |
|
30 | from urlparse import urlparse, parse_qsl | |
|
30 | from urllib.parse import urlparse, parse_qsl | 
|
31 | 31 | from urllib.parse import unquote_plus |
|
32 | 32 | import webob |
|
33 | 33 | |
|
34 | from webtest.app import TestResponse, TestApp, string_types | 

34 | from webtest.app import TestResponse, TestApp | 
|
35 | 35 | from webtest.compat import print_stderr |
|
36 | 36 | |
|
37 | 37 | import pytest |
|
38 | 38 | import rc_testdata |
|
39 | 39 | |
|
40 | 40 | from rhodecode.model.db import User, Repository |
|
41 | 41 | from rhodecode.model.meta import Session |
|
42 | 42 | from rhodecode.model.scm import ScmModel |
|
43 | 43 | from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository |
|
44 | 44 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
45 | 45 | from rhodecode.tests import login_user_session |
|
46 | 46 | |
|
47 | 47 | log = logging.getLogger(__name__) |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | class CustomTestResponse(TestResponse): |
|
51 | 51 | |
|
52 | 52 | def _save_output(self, out): |
|
53 | 53 | f = tempfile.NamedTemporaryFile(delete=False, prefix='rc-test-', suffix='.html') |
|
54 | 54 | f.write(out) |
|
55 | 55 | return f.name |
|
56 | 56 | |
|
57 | 57 | def mustcontain(self, *strings, **kw): |
|
58 | 58 | """ |
|
59 | 59 | Assert that the response contains all of the strings passed |
|
60 | 60 | in as arguments. |
|
61 | 61 | |
|
62 | 62 | Equivalent to:: |
|
63 | 63 | |
|
64 | 64 | assert string in res |
|
65 | 65 | """ |
|
66 | 66 | print_body = kw.pop('print_body', False) |
|
67 | 67 | if 'no' in kw: |
|
68 | 68 | no = kw['no'] |
|
69 | 69 | del kw['no'] |
|
70 | if not isinstance(no, string_types): | 

70 | if not isinstance(no, str): | 
|
71 | 71 | no = [no] |
|
72 | 72 | else: |
|
73 | 73 | no = [] |
|
74 | 74 | if kw: |
|
75 | 75 | raise TypeError( |
|
76 | 76 | "The only keyword argument allowed is 'no' got %s" % kw) |
|
77 | 77 | |
|
78 | 78 | f = self._save_output(str(self)) |
|
79 | 79 | |
|
80 | 80 | for s in strings: |
|
81 | 81 | if not s in self: |
|
82 | 82 | print_stderr("Actual response (no %r):" % s) |
|
83 | 83 | print_stderr("body output saved as `%s`" % f) |
|
84 | 84 | if print_body: |
|
85 | 85 | print_stderr(str(self)) |
|
86 | 86 | raise IndexError( |
|
87 | 87 | "Body does not contain string %r, body output saved as %s" % (s, f)) |
|
88 | 88 | |
|
89 | 89 | for no_s in no: |
|
90 | 90 | if no_s in self: |
|
91 | 91 | print_stderr("Actual response (has %r)" % no_s) |
|
92 | 92 | print_stderr("body output saved as `%s`" % f) |
|
93 | 93 | if print_body: |
|
94 | 94 | print_stderr(str(self)) |
|
95 | 95 | raise IndexError( |
|
96 | 96 | "Body contains bad string %r, body output saved as %s" % (no_s, f)) |
|
97 | 97 | |
|
98 | 98 | def assert_response(self): |
|
99 | 99 | return AssertResponse(self) |
|
100 | 100 | |
|
101 | 101 | def get_session_from_response(self): |
|
102 | 102 | """ |
|
103 | 103 | This returns the session from a response object. |
|
104 | 104 | """ |
|
105 | 105 | from rhodecode.lib.rc_beaker import session_factory_from_settings |
|
106 | 106 | session = session_factory_from_settings(self.test_app._pyramid_settings) |
|
107 | 107 | return session(self.request) |
|
108 | 108 | |
|
109 | 109 | |
|
110 | 110 | class TestRequest(webob.BaseRequest): |
|
111 | 111 | |
|
112 | 112 | # for py.test |
|
113 | 113 | disabled = True |
|
114 | 114 | ResponseClass = CustomTestResponse |
|
115 | 115 | |
|
116 | 116 | def add_response_callback(self, callback): |
|
117 | 117 | pass |
|
118 | 118 | |
|
119 | 119 | |
|
120 | 120 | class CustomTestApp(TestApp): |
|
121 | 121 | """ |
|
122 | 122 | Custom app to make mustcontain more useful, and to expose extra helper methods
|
123 | 123 | """ |
|
124 | 124 | RequestClass = TestRequest |
|
125 | 125 | rc_login_data = {} |
|
126 | 126 | rc_current_session = None |
|
127 | 127 | |
|
128 | 128 | def login(self, username=None, password=None): |
|
129 | 129 | from rhodecode.lib import auth |
|
130 | 130 | |
|
131 | 131 | if username and password: |
|
132 | 132 | session = login_user_session(self, username, password) |
|
133 | 133 | else: |
|
134 | 134 | session = login_user_session(self) |
|
135 | 135 | |
|
136 | 136 | self.rc_login_data['csrf_token'] = auth.get_csrf_token(session) |
|
137 | 137 | self.rc_current_session = session |
|
138 | 138 | return session['rhodecode_user'] |
|
139 | 139 | |
|
140 | 140 | @property |
|
141 | 141 | def csrf_token(self): |
|
142 | 142 | return self.rc_login_data['csrf_token'] |
|
143 | 143 | |
|
144 | 144 | @property |
|
145 | 145 | def _pyramid_registry(self): |
|
146 | 146 | return self.app.config.registry |
|
147 | 147 | |
|
148 | 148 | @property |
|
149 | 149 | def _pyramid_settings(self): |
|
150 | 150 | return self._pyramid_registry.settings |
|
151 | 151 | |
|
152 | 152 | |
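As a sketch of how `CustomTestApp` is typically driven, assuming the standard pytest `app` fixture is wired to this class and the suite's default test credentials are available; the route in the POST is illustrative only:

    def test_change_requires_csrf_token(app):
        # Log in with the default test credentials; the session's CSRF
        # token is extracted and cached on the app instance.
        app.login()

        # State-changing requests should carry the cached token.
        app.post(
            '/_admin/my_account/edit',  # illustrative route
            params={'csrf_token': app.csrf_token, 'firstname': 'New'})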
|
153 | 153 | def set_anonymous_access(enabled): |
|
154 | 154 | """(Dis)allows anonymous access depending on parameter `enabled`""" |
|
155 | 155 | user = User.get_default_user() |
|
156 | 156 | user.active = enabled |
|
157 | 157 | Session().add(user) |
|
158 | 158 | Session().commit() |
|
159 | 159 | time.sleep(1.5) # must sleep for cache (1s to expire) |
|
160 | 160 | log.info('anonymous access is now: %s', enabled) |
|
161 | 161 | assert enabled == User.get_default_user().active, ( |
|
162 | 162 | 'Cannot set anonymous access') |
|
163 | 163 | |
|
164 | 164 | |
|
165 | 165 | def check_xfail_backends(node, backend_alias): |
|
166 | 166 | # Using "xfail_backends" here intentionally, since this marks work |
|
167 | 167 | # which is "to be done" soon. |
|
168 | 168 | skip_marker = node.get_closest_marker('xfail_backends') |
|
169 | 169 | if skip_marker and backend_alias in skip_marker.args: |
|
170 | 170 | msg = "Support for backend %s to be developed." % (backend_alias, ) |
|
171 | 171 | msg = skip_marker.kwargs.get('reason', msg) |
|
172 | 172 | pytest.xfail(msg) |
|
173 | 173 | |
|
174 | 174 | |
|
175 | 175 | def check_skip_backends(node, backend_alias): |
|
176 | 176 | # Using "skip_backends" here intentionally, since this marks work which is |
|
177 | 177 | # not supported. |
|
178 | 178 | skip_marker = node.get_closest_marker('skip_backends') |
|
179 | 179 | if skip_marker and backend_alias in skip_marker.args: |
|
180 | 180 | msg = "Feature not supported for backend %s." % (backend_alias, ) |
|
181 | 181 | msg = skip_marker.kwargs.get('reason', msg) |
|
182 | 182 | pytest.skip(msg) |
|
183 | 183 | |
|
184 | 184 | |
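For context, a hedged sketch of the markers these two helpers look for on a test node; the backend aliases and reasons are illustrative, the markers are assumed to be registered by the suite's conftest, and `backend.repo` is an assumption about the backend fixture:

    @pytest.mark.skip_backends('svn', reason='feature not supported for svn')
    def test_feature_only_for_dvcs(backend):
        assert backend.repo

    @pytest.mark.xfail_backends('git', reason='git support to be developed')
    def test_feature_pending_for_git(backend):
        assert backend.repo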
|
185 | 185 | def extract_git_repo_from_dump(dump_name, repo_name): |
|
186 | 186 | """Create git repo `repo_name` from dump `dump_name`.""" |
|
187 | 187 | repos_path = ScmModel().repos_path |
|
188 | 188 | target_path = os.path.join(repos_path, repo_name) |
|
189 | 189 | rc_testdata.extract_git_dump(dump_name, target_path) |
|
190 | 190 | return target_path |
|
191 | 191 | |
|
192 | 192 | |
|
193 | 193 | def extract_hg_repo_from_dump(dump_name, repo_name): |
|
194 | 194 | """Create hg repo `repo_name` from dump `dump_name`.""" |
|
195 | 195 | repos_path = ScmModel().repos_path |
|
196 | 196 | target_path = os.path.join(repos_path, repo_name) |
|
197 | 197 | rc_testdata.extract_hg_dump(dump_name, target_path) |
|
198 | 198 | return target_path |
|
199 | 199 | |
|
200 | 200 | |
|
201 | 201 | def extract_svn_repo_from_dump(dump_name, repo_name): |
|
202 | 202 | """Create a svn repo `repo_name` from dump `dump_name`.""" |
|
203 | 203 | repos_path = ScmModel().repos_path |
|
204 | 204 | target_path = os.path.join(repos_path, repo_name) |
|
205 | 205 | SubversionRepository(target_path, create=True) |
|
206 | 206 | _load_svn_dump_into_repo(dump_name, target_path) |
|
207 | 207 | return target_path |
|
208 | 208 | |
|
209 | 209 | |
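A usage sketch for the dump-extraction helpers above; both names are hypothetical and would need a matching dump shipped inside `rc_testdata`, and the module-level `os` import is assumed:

    def test_repo_created_from_dump():
        # Extract a pre-recorded git dump into the test repos path and
        # check that the helper returned the on-disk location.
        target_path = extract_git_repo_from_dump(
            'example-dump', 'test-repo-from-dump')  # hypothetical names
        assert os.path.isdir(target_path)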
|
210 | 210 | def assert_message_in_log(log_records, message, levelno, module): |
|
211 | 211 | messages = [ |
|
212 | 212 | r.message for r in log_records |
|
213 | 213 | if r.module == module and r.levelno == levelno |
|
214 | 214 | ] |
|
215 | 215 | assert message in messages |
|
216 | 216 | |
|
217 | 217 | |
|
218 | 218 | def _load_svn_dump_into_repo(dump_name, repo_path): |
|
219 | 219 | """ |
|
220 | 220 | Utility to populate a svn repository with a named dump |
|
221 | 221 | |
|
222 | 222 | Currently the dumps are in rc_testdata. They might later be

223 | 223 | integrated into the main repository once they stabilize more.
|
224 | 224 | """ |
|
225 | 225 | dump = rc_testdata.load_svn_dump(dump_name) |
|
226 | 226 | load_dump = subprocess32.Popen( |
|
227 | 227 | ['svnadmin', 'load', repo_path], |
|
228 | 228 | stdin=subprocess32.PIPE, stdout=subprocess32.PIPE, |
|
229 | 229 | stderr=subprocess32.PIPE) |
|
230 | 230 | out, err = load_dump.communicate(dump) |
|
231 | 231 | if load_dump.returncode != 0: |
|
232 | 232 | log.error("Output of load_dump command: %s", out) |
|
233 | 233 | log.error("Error output of load_dump command: %s", err) |
|
234 | 234 | raise Exception( |
|
235 | 235 | 'Failed to load dump "%s" into repository at path "%s".' |
|
236 | 236 | % (dump_name, repo_path)) |
|
237 | 237 | |
|
238 | 238 | |
|
239 | 239 | class AssertResponse(object): |
|
240 | 240 | """ |
|
241 | 241 | Utility that helps to assert things about a given HTML response. |
|
242 | 242 | """ |
|
243 | 243 | |
|
244 | 244 | def __init__(self, response): |
|
245 | 245 | self.response = response |
|
246 | 246 | |
|
247 | 247 | def get_imports(self): |
|
248 | 248 | return fromstring, tostring, CSSSelector |
|
249 | 249 | |
|
250 | 250 | def one_element_exists(self, css_selector): |
|
251 | 251 | self.get_element(css_selector) |
|
252 | 252 | |
|
253 | 253 | def no_element_exists(self, css_selector): |
|
254 | 254 | assert not self._get_elements(css_selector) |
|
255 | 255 | |
|
256 | 256 | def element_equals_to(self, css_selector, expected_content): |
|
257 | 257 | element = self.get_element(css_selector) |
|
258 | 258 | element_text = self._element_to_string(element) |
|
259 | 259 | assert expected_content in element_text |
|
260 | 260 | |
|
261 | 261 | def element_contains(self, css_selector, expected_content): |
|
262 | 262 | element = self.get_element(css_selector) |
|
263 | 263 | assert expected_content in element.text_content() |
|
264 | 264 | |
|
265 | 265 | def element_value_contains(self, css_selector, expected_content): |
|
266 | 266 | element = self.get_element(css_selector) |
|
267 | 267 | assert expected_content in element.value |
|
268 | 268 | |
|
269 | 269 | def contains_one_link(self, link_text, href): |
|
270 | 270 | fromstring, tostring, CSSSelector = self.get_imports() |
|
271 | 271 | doc = fromstring(self.response.body) |
|
272 | 272 | sel = CSSSelector('a[href]') |
|
273 | 273 | elements = [ |
|
274 | 274 | e for e in sel(doc) if e.text_content().strip() == link_text] |
|
275 | 275 | assert len(elements) == 1, "Did not find link or found multiple links" |
|
276 | 276 | self._ensure_url_equal(elements[0].attrib.get('href'), href) |
|
277 | 277 | |
|
278 | 278 | def contains_one_anchor(self, anchor_id): |
|
279 | 279 | fromstring, tostring, CSSSelector = self.get_imports() |
|
280 | 280 | doc = fromstring(self.response.body) |
|
281 | 281 | sel = CSSSelector('#' + anchor_id) |
|
282 | 282 | elements = sel(doc) |
|
283 | 283 | assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id) |
|
284 | 284 | |
|
285 | 285 | def _ensure_url_equal(self, found, expected): |
|
286 | 286 | assert _Url(found) == _Url(expected) |
|
287 | 287 | |
|
288 | 288 | def get_element(self, css_selector): |
|
289 | 289 | elements = self._get_elements(css_selector) |
|
290 | 290 | assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector) |
|
291 | 291 | return elements[0] |
|
292 | 292 | |
|
293 | 293 | def get_elements(self, css_selector): |
|
294 | 294 | return self._get_elements(css_selector) |
|
295 | 295 | |
|
296 | 296 | def _get_elements(self, css_selector): |
|
297 | 297 | fromstring, tostring, CSSSelector = self.get_imports() |
|
298 | 298 | doc = fromstring(self.response.body) |
|
299 | 299 | sel = CSSSelector(css_selector) |
|
300 | 300 | elements = sel(doc) |
|
301 | 301 | return elements |
|
302 | 302 | |
|
303 | 303 | def _element_to_string(self, element): |
|
304 | 304 | fromstring, tostring, CSSSelector = self.get_imports() |
|
305 | 305 | return tostring(element) |
|
306 | 306 | |
|
307 | 307 | |
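A short sketch of `AssertResponse` in use, via the `assert_response()` hook defined on `CustomTestResponse` above; the route and CSS selectors are illustrative only:

    response = app.get('/_admin/users')  # illustrative route
    assert_response = response.assert_response()

    # Exactly one element must match each selector.
    assert_response.one_element_exists('.main')
    assert_response.element_contains('h1', 'Users')

    # And nothing should have matched an error container.
    assert_response.no_element_exists('.alert-danger')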
|
308 | 308 | class _Url(object): |
|
309 | 309 | """ |
|
310 | 310 | A url object that can be compared with other url objects
|
311 | 311 | without regard to the vagaries of encoding, escaping, and ordering |
|
312 | 312 | of parameters in query strings. |
|
313 | 313 | |
|
314 | 314 | Inspired by |
|
315 | 315 | http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python |
|
316 | 316 | """ |
|
317 | 317 | |
|
318 | 318 | def __init__(self, url): |
|
319 | parts = urlparse(url) | |
|
320 | _query = frozenset(parse_qsl(parts.query)) | |
|
319 | parts = urllib.parse.urlparse(url) | |
|
320 | _query = frozenset(urllib.parse.parse_qsl(parts.query)) | |
|
321 | 321 | _path = unquote_plus(parts.path) |
|
322 | 322 | parts = parts._replace(query=_query, path=_path) |
|
323 | 323 | self.parts = parts |
|
324 | 324 | |
|
325 | 325 | def __eq__(self, other): |
|
326 | 326 | return self.parts == other.parts |
|
327 | 327 | |
|
328 | 328 | def __hash__(self): |
|
329 | 329 | return hash(self.parts) |
|
330 | 330 | |
|
331 | 331 | |
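To illustrate the normalisation `_Url` performs (and which `_ensure_url_equal` relies on), two URLs that differ only in query ordering and escaping compare equal; the paths and parameters are illustrative:

    # Same parameters, different order and '+' vs '%20' escaping.
    assert _Url('/repo/files?at=tip&path=a%20b') == _Url('/repo/files?path=a+b&at=tip')

    # A genuinely different query string still breaks equality.
    assert _Url('/repo/files?at=tip') != _Url('/repo/files?at=stable')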
|
332 | 332 | def run_test_concurrently(times, raise_catched_exc=True): |
|
333 | 333 | """ |
|
334 | 334 | Add this decorator to small pieces of code that you want to test |
|
335 | 335 | concurrently |
|
336 | 336 | |
|
337 | 337 | ex: |
|
338 | 338 | |
|
339 | 339 | @run_test_concurrently(25)
|
340 | 340 | def my_test_function(): |
|
341 | 341 | ... |
|
342 | 342 | """ |
|
343 | 343 | def test_concurrently_decorator(test_func): |
|
344 | 344 | def wrapper(*args, **kwargs): |
|
345 | 345 | exceptions = [] |
|
346 | 346 | |
|
347 | 347 | def call_test_func(): |
|
348 | 348 | try: |
|
349 | 349 | test_func(*args, **kwargs) |
|
350 | 350 | except Exception as e: |
|
351 | 351 | exceptions.append(e) |
|
352 | 352 | if raise_catched_exc: |
|
353 | 353 | raise |
|
354 | 354 | threads = [] |
|
355 | 355 | for i in range(times): |
|
356 | 356 | threads.append(threading.Thread(target=call_test_func)) |
|
357 | 357 | for t in threads: |
|
358 | 358 | t.start() |
|
359 | 359 | for t in threads: |
|
360 | 360 | t.join() |
|
361 | 361 | if exceptions: |
|
362 | 362 | raise Exception( |
|
363 | 363 | 'test_concurrently intercepted %s exceptions: %s' % ( |
|
364 | 364 | len(exceptions), exceptions)) |
|
365 | 365 | return wrapper |
|
366 | 366 | return test_concurrently_decorator |
|
367 | 367 | |
|
368 | 368 | |
|
369 | 369 | def wait_for_url(url, timeout=10): |
|
370 | 370 | """ |
|
371 | 371 | Wait until URL becomes reachable. |
|
372 | 372 | |
|
373 | 373 | It polls the URL until the timeout is reached or the URL becomes reachable.

374 | 374 | It will call `py.test.fail` if the URL is not reachable.
|
375 | 375 | """ |
|
376 | 376 | timeout = time.time() + timeout |
|
377 | 377 | last = 0 |
|
378 | 378 | wait = 0.1 |
|
379 | 379 | |
|
380 | 380 | while timeout > last: |
|
381 | 381 | last = time.time() |
|
382 | 382 | if is_url_reachable(url): |
|
383 | 383 | break |
|
384 | 384 | elif (last + wait) > time.time(): |
|
385 | 385 | # Go to sleep because not enough time has passed since last check. |
|
386 | 386 | time.sleep(wait) |
|
387 | 387 | else: |
|
388 | 388 | pytest.fail("Timeout while waiting for URL {}".format(url)) |
|
389 | 389 | |
|
390 | 390 | |
|
391 | 391 | def is_url_reachable(url): |
|
392 | 392 | try: |
|
393 | 393 | urllib.request.urlopen(url) |
|
394 | 394 | except urllib.error.URLError: |
|
395 | 395 | log.exception('URL `{}` reach error'.format(url)) |
|
396 | 396 | return False |
|
397 | 397 | return True |
|
398 | 398 | |
|
399 | 399 | |
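A hedged sketch of `wait_for_url` and `is_url_reachable` in a test that starts a helper HTTP service; the address, port, and `start_stub_server` helper are hypothetical and not part of this module:

    def test_talks_to_local_service():
        # start_stub_server() stands in for whatever fixture launches the
        # service under test; it is not defined here.
        start_stub_server(port=9999)
        wait_for_url('http://127.0.0.1:9999', timeout=10)
        assert is_url_reachable('http://127.0.0.1:9999')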
|
400 | 400 | def repo_on_filesystem(repo_name): |
|
401 | 401 | from rhodecode.lib import vcs |
|
402 | 402 | from rhodecode.tests import TESTS_TMP_PATH |
|
403 | 403 | repo = vcs.get_vcs_instance( |
|
404 | 404 | os.path.join(TESTS_TMP_PATH, repo_name), create=False) |
|
405 | 405 | return repo is not None |
|
406 | 406 | |
|
407 | 407 | |
|
408 | 408 | def commit_change( |
|
409 | 409 | repo, filename, content, message, vcs_type, parent=None, newfile=False): |
|
410 | 410 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN |
|
411 | 411 | |
|
412 | 412 | repo = Repository.get_by_repo_name(repo) |
|
413 | 413 | _commit = parent |
|
414 | 414 | if not parent: |
|
415 | 415 | _commit = EmptyCommit(alias=vcs_type) |
|
416 | 416 | |
|
417 | 417 | if newfile: |
|
418 | 418 | nodes = { |
|
419 | 419 | filename: { |
|
420 | 420 | 'content': content |
|
421 | 421 | } |
|
422 | 422 | } |
|
423 | 423 | commit = ScmModel().create_nodes( |
|
424 | 424 | user=TEST_USER_ADMIN_LOGIN, repo=repo, |
|
425 | 425 | message=message, |
|
426 | 426 | nodes=nodes, |
|
427 | 427 | parent_commit=_commit, |
|
428 | 428 | author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN), |
|
429 | 429 | ) |
|
430 | 430 | else: |
|
431 | 431 | commit = ScmModel().commit_change( |
|
432 | 432 | repo=repo.scm_instance(), repo_name=repo.repo_name, |
|
433 | 433 | commit=parent, user=TEST_USER_ADMIN_LOGIN, |
|
434 | 434 | author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN), |
|
435 | 435 | message=message, |
|
436 | 436 | content=content, |
|
437 | 437 | f_path=filename |
|
438 | 438 | ) |
|
439 | 439 | return commit |
|
440 | 440 | |
|
441 | 441 | |
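A sketch of `commit_change` seeding a repository with one new file; the `backend` fixture attributes (`repo_name`, `alias`) are assumptions about the standard backend fixture, and the file name and contents are illustrative:

    def test_seed_repo_with_file(backend):
        commit = commit_change(
            repo=backend.repo_name,   # assumed backend fixture attribute
            filename='README.rst',
            content='initial content\n',
            message='Add README',
            vcs_type=backend.alias,   # assumed backend fixture attribute
            newfile=True)
        assert commit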
|
442 | 442 | def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None): |
|
443 | 443 | if not default: |
|
444 | 444 | raise ValueError('Permission for default user must be given') |
|
445 | 445 | form_data = [( |
|
446 | 446 | 'csrf_token', csrf_token |
|
447 | 447 | )] |
|
448 | 448 | # add default |
|
449 | 449 | form_data.extend([ |
|
450 | 450 | ('u_perm_1', default) |
|
451 | 451 | ]) |
|
452 | 452 | |
|
453 | 453 | if grant: |
|
454 | 454 | for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1): |
|
455 | 455 | form_data.extend([ |
|
456 | 456 | ('perm_new_member_perm_new{}'.format(cnt), perm), |
|
457 | 457 | ('perm_new_member_id_new{}'.format(cnt), obj_id), |
|
458 | 458 | ('perm_new_member_name_new{}'.format(cnt), obj_name), |
|
459 | 459 | ('perm_new_member_type_new{}'.format(cnt), obj_type), |
|
460 | 460 | |
|
461 | 461 | ]) |
|
462 | 462 | if revoke: |
|
463 | 463 | for obj_id, obj_type in revoke: |
|
464 | 464 | form_data.extend([ |
|
465 | 465 | ('perm_del_member_id_{}'.format(obj_id), obj_id), |
|
466 | 466 | ('perm_del_member_type_{}'.format(obj_id), obj_type), |
|
467 | 467 | ]) |
|
468 | 468 | return form_data |
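Finally, a sketch of how the generated form data might be posted back, assuming a logged-in `CustomTestApp` and its csrf token; the user ids, permission names, and route are illustrative only:

    form_data = permission_update_data_generator(
        csrf_token,
        default='repository.read',
        grant=[(2, 'repository.write', 'some-user', 'user')],  # (obj_id, perm, name, type)
        revoke=[(3, 'user')])                                   # (obj_id, type)

    # webtest accepts the list of two-tuples directly as form params.
    app.post('/some-repo/settings/permissions/update', params=form_data)  # illustrative route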