@@ -1,580 +1,580 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import urlparse |
|
21 | import urllib.parse | |
22 |
|
22 | |||
23 | import mock |
|
23 | import mock | |
24 | import pytest |
|
24 | import pytest | |
25 |
|
25 | |||
26 | from rhodecode.tests import ( |
|
26 | from rhodecode.tests import ( | |
27 | assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN, |
|
27 | assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN, | |
28 | no_newline_id_generator) |
|
28 | no_newline_id_generator) | |
29 | from rhodecode.tests.fixture import Fixture |
|
29 | from rhodecode.tests.fixture import Fixture | |
30 | from rhodecode.lib.auth import check_password |
|
30 | from rhodecode.lib.auth import check_password | |
31 | from rhodecode.lib import helpers as h |
|
31 | from rhodecode.lib import helpers as h | |
32 | from rhodecode.model.auth_token import AuthTokenModel |
|
32 | from rhodecode.model.auth_token import AuthTokenModel | |
33 | from rhodecode.model.db import User, Notification, UserApiKeys |
|
33 | from rhodecode.model.db import User, Notification, UserApiKeys | |
34 | from rhodecode.model.meta import Session |
|
34 | from rhodecode.model.meta import Session | |
35 |
|
35 | |||
36 | fixture = Fixture() |
|
36 | fixture = Fixture() | |
37 |
|
37 | |||
38 | whitelist_view = ['RepoCommitsView:repo_commit_raw'] |
|
38 | whitelist_view = ['RepoCommitsView:repo_commit_raw'] | |
39 |
|
39 | |||
40 |
|
40 | |||
41 | def route_path(name, params=None, **kwargs): |
|
41 | def route_path(name, params=None, **kwargs): | |
42 | import urllib.request, urllib.parse, urllib.error |
|
42 | import urllib.request, urllib.parse, urllib.error | |
43 | from rhodecode.apps._base import ADMIN_PREFIX |
|
43 | from rhodecode.apps._base import ADMIN_PREFIX | |
44 |
|
44 | |||
45 | base_url = { |
|
45 | base_url = { | |
46 | 'login': ADMIN_PREFIX + '/login', |
|
46 | 'login': ADMIN_PREFIX + '/login', | |
47 | 'logout': ADMIN_PREFIX + '/logout', |
|
47 | 'logout': ADMIN_PREFIX + '/logout', | |
48 | 'register': ADMIN_PREFIX + '/register', |
|
48 | 'register': ADMIN_PREFIX + '/register', | |
49 | 'reset_password': |
|
49 | 'reset_password': | |
50 | ADMIN_PREFIX + '/password_reset', |
|
50 | ADMIN_PREFIX + '/password_reset', | |
51 | 'reset_password_confirmation': |
|
51 | 'reset_password_confirmation': | |
52 | ADMIN_PREFIX + '/password_reset_confirmation', |
|
52 | ADMIN_PREFIX + '/password_reset_confirmation', | |
53 |
|
53 | |||
54 | 'admin_permissions_application': |
|
54 | 'admin_permissions_application': | |
55 | ADMIN_PREFIX + '/permissions/application', |
|
55 | ADMIN_PREFIX + '/permissions/application', | |
56 | 'admin_permissions_application_update': |
|
56 | 'admin_permissions_application_update': | |
57 | ADMIN_PREFIX + '/permissions/application/update', |
|
57 | ADMIN_PREFIX + '/permissions/application/update', | |
58 |
|
58 | |||
59 | 'repo_commit_raw': '/{repo_name}/raw-changeset/{commit_id}' |
|
59 | 'repo_commit_raw': '/{repo_name}/raw-changeset/{commit_id}' | |
60 |
|
60 | |||
61 | }[name].format(**kwargs) |
|
61 | }[name].format(**kwargs) | |
62 |
|
62 | |||
63 | if params: |
|
63 | if params: | |
64 | base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params)) |
|
64 | base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params)) | |
65 | return base_url |
|
65 | return base_url | |
66 |
|
66 | |||
67 |
|
67 | |||
68 | @pytest.mark.usefixtures('app') |
|
68 | @pytest.mark.usefixtures('app') | |
69 | class TestLoginController(object): |
|
69 | class TestLoginController(object): | |
70 | destroy_users = set() |
|
70 | destroy_users = set() | |
71 |
|
71 | |||
72 | @classmethod |
|
72 | @classmethod | |
73 | def teardown_class(cls): |
|
73 | def teardown_class(cls): | |
74 | fixture.destroy_users(cls.destroy_users) |
|
74 | fixture.destroy_users(cls.destroy_users) | |
75 |
|
75 | |||
76 | def teardown_method(self, method): |
|
76 | def teardown_method(self, method): | |
77 | for n in Notification.query().all(): |
|
77 | for n in Notification.query().all(): | |
78 | Session().delete(n) |
|
78 | Session().delete(n) | |
79 |
|
79 | |||
80 | Session().commit() |
|
80 | Session().commit() | |
81 | assert Notification.query().all() == [] |
|
81 | assert Notification.query().all() == [] | |
82 |
|
82 | |||
83 | def test_index(self): |
|
83 | def test_index(self): | |
84 | response = self.app.get(route_path('login')) |
|
84 | response = self.app.get(route_path('login')) | |
85 | assert response.status == '200 OK' |
|
85 | assert response.status == '200 OK' | |
86 | # Test response... |
|
86 | # Test response... | |
87 |
|
87 | |||
88 | def test_login_admin_ok(self): |
|
88 | def test_login_admin_ok(self): | |
89 | response = self.app.post(route_path('login'), |
|
89 | response = self.app.post(route_path('login'), | |
90 | {'username': 'test_admin', |
|
90 | {'username': 'test_admin', | |
91 | 'password': 'test12'}, status=302) |
|
91 | 'password': 'test12'}, status=302) | |
92 | response = response.follow() |
|
92 | response = response.follow() | |
93 | session = response.get_session_from_response() |
|
93 | session = response.get_session_from_response() | |
94 | username = session['rhodecode_user'].get('username') |
|
94 | username = session['rhodecode_user'].get('username') | |
95 | assert username == 'test_admin' |
|
95 | assert username == 'test_admin' | |
96 | response.mustcontain('logout') |
|
96 | response.mustcontain('logout') | |
97 |
|
97 | |||
98 | def test_login_regular_ok(self): |
|
98 | def test_login_regular_ok(self): | |
99 | response = self.app.post(route_path('login'), |
|
99 | response = self.app.post(route_path('login'), | |
100 | {'username': 'test_regular', |
|
100 | {'username': 'test_regular', | |
101 | 'password': 'test12'}, status=302) |
|
101 | 'password': 'test12'}, status=302) | |
102 |
|
102 | |||
103 | response = response.follow() |
|
103 | response = response.follow() | |
104 | session = response.get_session_from_response() |
|
104 | session = response.get_session_from_response() | |
105 | username = session['rhodecode_user'].get('username') |
|
105 | username = session['rhodecode_user'].get('username') | |
106 | assert username == 'test_regular' |
|
106 | assert username == 'test_regular' | |
107 | response.mustcontain('logout') |
|
107 | response.mustcontain('logout') | |
108 |
|
108 | |||
109 | def test_login_regular_forbidden_when_super_admin_restriction(self): |
|
109 | def test_login_regular_forbidden_when_super_admin_restriction(self): | |
110 | from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin |
|
110 | from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin | |
111 | with fixture.auth_restriction(self.app._pyramid_registry, |
|
111 | with fixture.auth_restriction(self.app._pyramid_registry, | |
112 | RhodeCodeAuthPlugin.AUTH_RESTRICTION_SUPER_ADMIN): |
|
112 | RhodeCodeAuthPlugin.AUTH_RESTRICTION_SUPER_ADMIN): | |
113 | response = self.app.post(route_path('login'), |
|
113 | response = self.app.post(route_path('login'), | |
114 | {'username': 'test_regular', |
|
114 | {'username': 'test_regular', | |
115 | 'password': 'test12'}) |
|
115 | 'password': 'test12'}) | |
116 |
|
116 | |||
117 | response.mustcontain('invalid user name') |
|
117 | response.mustcontain('invalid user name') | |
118 | response.mustcontain('invalid password') |
|
118 | response.mustcontain('invalid password') | |
119 |
|
119 | |||
120 | def test_login_regular_forbidden_when_scope_restriction(self): |
|
120 | def test_login_regular_forbidden_when_scope_restriction(self): | |
121 | from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin |
|
121 | from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin | |
122 | with fixture.scope_restriction(self.app._pyramid_registry, |
|
122 | with fixture.scope_restriction(self.app._pyramid_registry, | |
123 | RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_VCS): |
|
123 | RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_VCS): | |
124 | response = self.app.post(route_path('login'), |
|
124 | response = self.app.post(route_path('login'), | |
125 | {'username': 'test_regular', |
|
125 | {'username': 'test_regular', | |
126 | 'password': 'test12'}) |
|
126 | 'password': 'test12'}) | |
127 |
|
127 | |||
128 | response.mustcontain('invalid user name') |
|
128 | response.mustcontain('invalid user name') | |
129 | response.mustcontain('invalid password') |
|
129 | response.mustcontain('invalid password') | |
130 |
|
130 | |||
131 | def test_login_ok_came_from(self): |
|
131 | def test_login_ok_came_from(self): | |
132 | test_came_from = '/_admin/users?branch=stable' |
|
132 | test_came_from = '/_admin/users?branch=stable' | |
133 | _url = '{}?came_from={}'.format(route_path('login'), test_came_from) |
|
133 | _url = '{}?came_from={}'.format(route_path('login'), test_came_from) | |
134 | response = self.app.post( |
|
134 | response = self.app.post( | |
135 | _url, {'username': 'test_admin', 'password': 'test12'}, status=302) |
|
135 | _url, {'username': 'test_admin', 'password': 'test12'}, status=302) | |
136 |
|
136 | |||
137 | assert 'branch=stable' in response.location |
|
137 | assert 'branch=stable' in response.location | |
138 | response = response.follow() |
|
138 | response = response.follow() | |
139 |
|
139 | |||
140 | assert response.status == '200 OK' |
|
140 | assert response.status == '200 OK' | |
141 | response.mustcontain('Users administration') |
|
141 | response.mustcontain('Users administration') | |
142 |
|
142 | |||
143 | def test_redirect_to_login_with_get_args(self): |
|
143 | def test_redirect_to_login_with_get_args(self): | |
144 | with fixture.anon_access(False): |
|
144 | with fixture.anon_access(False): | |
145 | kwargs = {'branch': 'stable'} |
|
145 | kwargs = {'branch': 'stable'} | |
146 | response = self.app.get( |
|
146 | response = self.app.get( | |
147 | h.route_path('repo_summary', repo_name=HG_REPO, _query=kwargs), |
|
147 | h.route_path('repo_summary', repo_name=HG_REPO, _query=kwargs), | |
148 | status=302) |
|
148 | status=302) | |
149 |
|
149 | |||
150 | response_query = urlparse.parse_qsl(response.location) |
|
150 | response_query = urllib.parse.parse_qsl(response.location) | |
151 | assert 'branch=stable' in response_query[0][1] |
|
151 | assert 'branch=stable' in response_query[0][1] | |
152 |
|
152 | |||
153 | def test_login_form_with_get_args(self): |
|
153 | def test_login_form_with_get_args(self): | |
154 | _url = '{}?came_from=/_admin/users,branch=stable'.format(route_path('login')) |
|
154 | _url = '{}?came_from=/_admin/users,branch=stable'.format(route_path('login')) | |
155 | response = self.app.get(_url) |
|
155 | response = self.app.get(_url) | |
156 | assert 'branch%3Dstable' in response.form.action |
|
156 | assert 'branch%3Dstable' in response.form.action | |
157 |
|
157 | |||
158 | @pytest.mark.parametrize("url_came_from", [ |
|
158 | @pytest.mark.parametrize("url_came_from", [ | |
159 | 'data:text/html,<script>window.alert("xss")</script>', |
|
159 | 'data:text/html,<script>window.alert("xss")</script>', | |
160 | 'mailto:test@rhodecode.org', |
|
160 | 'mailto:test@rhodecode.org', | |
161 | 'file:///etc/passwd', |
|
161 | 'file:///etc/passwd', | |
162 | 'ftp://some.ftp.server', |
|
162 | 'ftp://some.ftp.server', | |
163 | 'http://other.domain', |
|
163 | 'http://other.domain', | |
164 | '/\r\nX-Forwarded-Host: http://example.org', |
|
164 | '/\r\nX-Forwarded-Host: http://example.org', | |
165 | ], ids=no_newline_id_generator) |
|
165 | ], ids=no_newline_id_generator) | |
166 | def test_login_bad_came_froms(self, url_came_from): |
|
166 | def test_login_bad_came_froms(self, url_came_from): | |
167 | _url = '{}?came_from={}'.format(route_path('login'), url_came_from) |
|
167 | _url = '{}?came_from={}'.format(route_path('login'), url_came_from) | |
168 | response = self.app.post( |
|
168 | response = self.app.post( | |
169 | _url, |
|
169 | _url, | |
170 | {'username': 'test_admin', 'password': 'test12'}) |
|
170 | {'username': 'test_admin', 'password': 'test12'}) | |
171 | assert response.status == '302 Found' |
|
171 | assert response.status == '302 Found' | |
172 | response = response.follow() |
|
172 | response = response.follow() | |
173 | assert response.status == '200 OK' |
|
173 | assert response.status == '200 OK' | |
174 | assert response.request.path == '/' |
|
174 | assert response.request.path == '/' | |
175 |
|
175 | |||
176 | def test_login_short_password(self): |
|
176 | def test_login_short_password(self): | |
177 | response = self.app.post(route_path('login'), |
|
177 | response = self.app.post(route_path('login'), | |
178 | {'username': 'test_admin', |
|
178 | {'username': 'test_admin', | |
179 | 'password': 'as'}) |
|
179 | 'password': 'as'}) | |
180 | assert response.status == '200 OK' |
|
180 | assert response.status == '200 OK' | |
181 |
|
181 | |||
182 | response.mustcontain('Enter 3 characters or more') |
|
182 | response.mustcontain('Enter 3 characters or more') | |
183 |
|
183 | |||
184 | def test_login_wrong_non_ascii_password(self, user_regular): |
|
184 | def test_login_wrong_non_ascii_password(self, user_regular): | |
185 | response = self.app.post( |
|
185 | response = self.app.post( | |
186 | route_path('login'), |
|
186 | route_path('login'), | |
187 | {'username': user_regular.username, |
|
187 | {'username': user_regular.username, | |
188 | 'password': u'invalid-non-asci\xe4'.encode('utf8')}) |
|
188 | 'password': u'invalid-non-asci\xe4'.encode('utf8')}) | |
189 |
|
189 | |||
190 | response.mustcontain('invalid user name') |
|
190 | response.mustcontain('invalid user name') | |
191 | response.mustcontain('invalid password') |
|
191 | response.mustcontain('invalid password') | |
192 |
|
192 | |||
193 | def test_login_with_non_ascii_password(self, user_util): |
|
193 | def test_login_with_non_ascii_password(self, user_util): | |
194 | password = u'valid-non-ascii\xe4' |
|
194 | password = u'valid-non-ascii\xe4' | |
195 | user = user_util.create_user(password=password) |
|
195 | user = user_util.create_user(password=password) | |
196 | response = self.app.post( |
|
196 | response = self.app.post( | |
197 | route_path('login'), |
|
197 | route_path('login'), | |
198 | {'username': user.username, |
|
198 | {'username': user.username, | |
199 | 'password': password.encode('utf-8')}) |
|
199 | 'password': password.encode('utf-8')}) | |
200 | assert response.status_code == 302 |
|
200 | assert response.status_code == 302 | |
201 |
|
201 | |||
202 | def test_login_wrong_username_password(self): |
|
202 | def test_login_wrong_username_password(self): | |
203 | response = self.app.post(route_path('login'), |
|
203 | response = self.app.post(route_path('login'), | |
204 | {'username': 'error', |
|
204 | {'username': 'error', | |
205 | 'password': 'test12'}) |
|
205 | 'password': 'test12'}) | |
206 |
|
206 | |||
207 | response.mustcontain('invalid user name') |
|
207 | response.mustcontain('invalid user name') | |
208 | response.mustcontain('invalid password') |
|
208 | response.mustcontain('invalid password') | |
209 |
|
209 | |||
210 | def test_login_admin_ok_password_migration(self, real_crypto_backend): |
|
210 | def test_login_admin_ok_password_migration(self, real_crypto_backend): | |
211 | from rhodecode.lib import auth |
|
211 | from rhodecode.lib import auth | |
212 |
|
212 | |||
213 | # create new user, with sha256 password |
|
213 | # create new user, with sha256 password | |
214 | temp_user = 'test_admin_sha256' |
|
214 | temp_user = 'test_admin_sha256' | |
215 | user = fixture.create_user(temp_user) |
|
215 | user = fixture.create_user(temp_user) | |
216 | user.password = auth._RhodeCodeCryptoSha256().hash_create( |
|
216 | user.password = auth._RhodeCodeCryptoSha256().hash_create( | |
217 | b'test123') |
|
217 | b'test123') | |
218 | Session().add(user) |
|
218 | Session().add(user) | |
219 | Session().commit() |
|
219 | Session().commit() | |
220 | self.destroy_users.add(temp_user) |
|
220 | self.destroy_users.add(temp_user) | |
221 | response = self.app.post(route_path('login'), |
|
221 | response = self.app.post(route_path('login'), | |
222 | {'username': temp_user, |
|
222 | {'username': temp_user, | |
223 | 'password': 'test123'}, status=302) |
|
223 | 'password': 'test123'}, status=302) | |
224 |
|
224 | |||
225 | response = response.follow() |
|
225 | response = response.follow() | |
226 | session = response.get_session_from_response() |
|
226 | session = response.get_session_from_response() | |
227 | username = session['rhodecode_user'].get('username') |
|
227 | username = session['rhodecode_user'].get('username') | |
228 | assert username == temp_user |
|
228 | assert username == temp_user | |
229 | response.mustcontain('logout') |
|
229 | response.mustcontain('logout') | |
230 |
|
230 | |||
231 | # new password should be bcrypted, after log-in and transfer |
|
231 | # new password should be bcrypted, after log-in and transfer | |
232 | user = User.get_by_username(temp_user) |
|
232 | user = User.get_by_username(temp_user) | |
233 | assert user.password.startswith('$') |
|
233 | assert user.password.startswith('$') | |
234 |
|
234 | |||
235 | # REGISTRATIONS |
|
235 | # REGISTRATIONS | |
236 | def test_register(self): |
|
236 | def test_register(self): | |
237 | response = self.app.get(route_path('register')) |
|
237 | response = self.app.get(route_path('register')) | |
238 | response.mustcontain('Create an Account') |
|
238 | response.mustcontain('Create an Account') | |
239 |
|
239 | |||
240 | def test_register_err_same_username(self): |
|
240 | def test_register_err_same_username(self): | |
241 | uname = 'test_admin' |
|
241 | uname = 'test_admin' | |
242 | response = self.app.post( |
|
242 | response = self.app.post( | |
243 | route_path('register'), |
|
243 | route_path('register'), | |
244 | { |
|
244 | { | |
245 | 'username': uname, |
|
245 | 'username': uname, | |
246 | 'password': 'test12', |
|
246 | 'password': 'test12', | |
247 | 'password_confirmation': 'test12', |
|
247 | 'password_confirmation': 'test12', | |
248 | 'email': 'goodmail@domain.com', |
|
248 | 'email': 'goodmail@domain.com', | |
249 | 'firstname': 'test', |
|
249 | 'firstname': 'test', | |
250 | 'lastname': 'test' |
|
250 | 'lastname': 'test' | |
251 | } |
|
251 | } | |
252 | ) |
|
252 | ) | |
253 |
|
253 | |||
254 | assertr = response.assert_response() |
|
254 | assertr = response.assert_response() | |
255 | msg = 'Username "%(username)s" already exists' |
|
255 | msg = 'Username "%(username)s" already exists' | |
256 | msg = msg % {'username': uname} |
|
256 | msg = msg % {'username': uname} | |
257 | assertr.element_contains('#username+.error-message', msg) |
|
257 | assertr.element_contains('#username+.error-message', msg) | |
258 |
|
258 | |||
259 | def test_register_err_same_email(self): |
|
259 | def test_register_err_same_email(self): | |
260 | response = self.app.post( |
|
260 | response = self.app.post( | |
261 | route_path('register'), |
|
261 | route_path('register'), | |
262 | { |
|
262 | { | |
263 | 'username': 'test_admin_0', |
|
263 | 'username': 'test_admin_0', | |
264 | 'password': 'test12', |
|
264 | 'password': 'test12', | |
265 | 'password_confirmation': 'test12', |
|
265 | 'password_confirmation': 'test12', | |
266 | 'email': 'test_admin@mail.com', |
|
266 | 'email': 'test_admin@mail.com', | |
267 | 'firstname': 'test', |
|
267 | 'firstname': 'test', | |
268 | 'lastname': 'test' |
|
268 | 'lastname': 'test' | |
269 | } |
|
269 | } | |
270 | ) |
|
270 | ) | |
271 |
|
271 | |||
272 | assertr = response.assert_response() |
|
272 | assertr = response.assert_response() | |
273 | msg = u'This e-mail address is already taken' |
|
273 | msg = u'This e-mail address is already taken' | |
274 | assertr.element_contains('#email+.error-message', msg) |
|
274 | assertr.element_contains('#email+.error-message', msg) | |
275 |
|
275 | |||
276 | def test_register_err_same_email_case_sensitive(self): |
|
276 | def test_register_err_same_email_case_sensitive(self): | |
277 | response = self.app.post( |
|
277 | response = self.app.post( | |
278 | route_path('register'), |
|
278 | route_path('register'), | |
279 | { |
|
279 | { | |
280 | 'username': 'test_admin_1', |
|
280 | 'username': 'test_admin_1', | |
281 | 'password': 'test12', |
|
281 | 'password': 'test12', | |
282 | 'password_confirmation': 'test12', |
|
282 | 'password_confirmation': 'test12', | |
283 | 'email': 'TesT_Admin@mail.COM', |
|
283 | 'email': 'TesT_Admin@mail.COM', | |
284 | 'firstname': 'test', |
|
284 | 'firstname': 'test', | |
285 | 'lastname': 'test' |
|
285 | 'lastname': 'test' | |
286 | } |
|
286 | } | |
287 | ) |
|
287 | ) | |
288 | assertr = response.assert_response() |
|
288 | assertr = response.assert_response() | |
289 | msg = u'This e-mail address is already taken' |
|
289 | msg = u'This e-mail address is already taken' | |
290 | assertr.element_contains('#email+.error-message', msg) |
|
290 | assertr.element_contains('#email+.error-message', msg) | |
291 |
|
291 | |||
292 | def test_register_err_wrong_data(self): |
|
292 | def test_register_err_wrong_data(self): | |
293 | response = self.app.post( |
|
293 | response = self.app.post( | |
294 | route_path('register'), |
|
294 | route_path('register'), | |
295 | { |
|
295 | { | |
296 | 'username': 'xs', |
|
296 | 'username': 'xs', | |
297 | 'password': 'test', |
|
297 | 'password': 'test', | |
298 | 'password_confirmation': 'test', |
|
298 | 'password_confirmation': 'test', | |
299 | 'email': 'goodmailm', |
|
299 | 'email': 'goodmailm', | |
300 | 'firstname': 'test', |
|
300 | 'firstname': 'test', | |
301 | 'lastname': 'test' |
|
301 | 'lastname': 'test' | |
302 | } |
|
302 | } | |
303 | ) |
|
303 | ) | |
304 | assert response.status == '200 OK' |
|
304 | assert response.status == '200 OK' | |
305 | response.mustcontain('An email address must contain a single @') |
|
305 | response.mustcontain('An email address must contain a single @') | |
306 | response.mustcontain('Enter a value 6 characters long or more') |
|
306 | response.mustcontain('Enter a value 6 characters long or more') | |
307 |
|
307 | |||
308 | def test_register_err_username(self): |
|
308 | def test_register_err_username(self): | |
309 | response = self.app.post( |
|
309 | response = self.app.post( | |
310 | route_path('register'), |
|
310 | route_path('register'), | |
311 | { |
|
311 | { | |
312 | 'username': 'error user', |
|
312 | 'username': 'error user', | |
313 | 'password': 'test12', |
|
313 | 'password': 'test12', | |
314 | 'password_confirmation': 'test12', |
|
314 | 'password_confirmation': 'test12', | |
315 | 'email': 'goodmailm', |
|
315 | 'email': 'goodmailm', | |
316 | 'firstname': 'test', |
|
316 | 'firstname': 'test', | |
317 | 'lastname': 'test' |
|
317 | 'lastname': 'test' | |
318 | } |
|
318 | } | |
319 | ) |
|
319 | ) | |
320 |
|
320 | |||
321 | response.mustcontain('An email address must contain a single @') |
|
321 | response.mustcontain('An email address must contain a single @') | |
322 | response.mustcontain( |
|
322 | response.mustcontain( | |
323 | 'Username may only contain ' |
|
323 | 'Username may only contain ' | |
324 | 'alphanumeric characters underscores, ' |
|
324 | 'alphanumeric characters underscores, ' | |
325 | 'periods or dashes and must begin with ' |
|
325 | 'periods or dashes and must begin with ' | |
326 | 'alphanumeric character') |
|
326 | 'alphanumeric character') | |
327 |
|
327 | |||
328 | def test_register_err_case_sensitive(self): |
|
328 | def test_register_err_case_sensitive(self): | |
329 | usr = 'Test_Admin' |
|
329 | usr = 'Test_Admin' | |
330 | response = self.app.post( |
|
330 | response = self.app.post( | |
331 | route_path('register'), |
|
331 | route_path('register'), | |
332 | { |
|
332 | { | |
333 | 'username': usr, |
|
333 | 'username': usr, | |
334 | 'password': 'test12', |
|
334 | 'password': 'test12', | |
335 | 'password_confirmation': 'test12', |
|
335 | 'password_confirmation': 'test12', | |
336 | 'email': 'goodmailm', |
|
336 | 'email': 'goodmailm', | |
337 | 'firstname': 'test', |
|
337 | 'firstname': 'test', | |
338 | 'lastname': 'test' |
|
338 | 'lastname': 'test' | |
339 | } |
|
339 | } | |
340 | ) |
|
340 | ) | |
341 |
|
341 | |||
342 | assertr = response.assert_response() |
|
342 | assertr = response.assert_response() | |
343 | msg = u'Username "%(username)s" already exists' |
|
343 | msg = u'Username "%(username)s" already exists' | |
344 | msg = msg % {'username': usr} |
|
344 | msg = msg % {'username': usr} | |
345 | assertr.element_contains('#username+.error-message', msg) |
|
345 | assertr.element_contains('#username+.error-message', msg) | |
346 |
|
346 | |||
347 | def test_register_special_chars(self): |
|
347 | def test_register_special_chars(self): | |
348 | response = self.app.post( |
|
348 | response = self.app.post( | |
349 | route_path('register'), |
|
349 | route_path('register'), | |
350 | { |
|
350 | { | |
351 | 'username': 'xxxaxn', |
|
351 | 'username': 'xxxaxn', | |
352 | 'password': 'ąćźżąśśśś', |
|
352 | 'password': 'ąćźżąśśśś', | |
353 | 'password_confirmation': 'ąćźżąśśśś', |
|
353 | 'password_confirmation': 'ąćźżąśśśś', | |
354 | 'email': 'goodmailm@test.plx', |
|
354 | 'email': 'goodmailm@test.plx', | |
355 | 'firstname': 'test', |
|
355 | 'firstname': 'test', | |
356 | 'lastname': 'test' |
|
356 | 'lastname': 'test' | |
357 | } |
|
357 | } | |
358 | ) |
|
358 | ) | |
359 |
|
359 | |||
360 | msg = u'Invalid characters (non-ascii) in password' |
|
360 | msg = u'Invalid characters (non-ascii) in password' | |
361 | response.mustcontain(msg) |
|
361 | response.mustcontain(msg) | |
362 |
|
362 | |||
363 | def test_register_password_mismatch(self): |
|
363 | def test_register_password_mismatch(self): | |
364 | response = self.app.post( |
|
364 | response = self.app.post( | |
365 | route_path('register'), |
|
365 | route_path('register'), | |
366 | { |
|
366 | { | |
367 | 'username': 'xs', |
|
367 | 'username': 'xs', | |
368 | 'password': '123qwe', |
|
368 | 'password': '123qwe', | |
369 | 'password_confirmation': 'qwe123', |
|
369 | 'password_confirmation': 'qwe123', | |
370 | 'email': 'goodmailm@test.plxa', |
|
370 | 'email': 'goodmailm@test.plxa', | |
371 | 'firstname': 'test', |
|
371 | 'firstname': 'test', | |
372 | 'lastname': 'test' |
|
372 | 'lastname': 'test' | |
373 | } |
|
373 | } | |
374 | ) |
|
374 | ) | |
375 | msg = u'Passwords do not match' |
|
375 | msg = u'Passwords do not match' | |
376 | response.mustcontain(msg) |
|
376 | response.mustcontain(msg) | |
377 |
|
377 | |||
378 | def test_register_ok(self): |
|
378 | def test_register_ok(self): | |
379 | username = 'test_regular4' |
|
379 | username = 'test_regular4' | |
380 | password = 'qweqwe' |
|
380 | password = 'qweqwe' | |
381 | email = 'marcin@test.com' |
|
381 | email = 'marcin@test.com' | |
382 | name = 'testname' |
|
382 | name = 'testname' | |
383 | lastname = 'testlastname' |
|
383 | lastname = 'testlastname' | |
384 |
|
384 | |||
385 | # this initializes a session |
|
385 | # this initializes a session | |
386 | response = self.app.get(route_path('register')) |
|
386 | response = self.app.get(route_path('register')) | |
387 | response.mustcontain('Create an Account') |
|
387 | response.mustcontain('Create an Account') | |
388 |
|
388 | |||
389 |
|
389 | |||
390 | response = self.app.post( |
|
390 | response = self.app.post( | |
391 | route_path('register'), |
|
391 | route_path('register'), | |
392 | { |
|
392 | { | |
393 | 'username': username, |
|
393 | 'username': username, | |
394 | 'password': password, |
|
394 | 'password': password, | |
395 | 'password_confirmation': password, |
|
395 | 'password_confirmation': password, | |
396 | 'email': email, |
|
396 | 'email': email, | |
397 | 'firstname': name, |
|
397 | 'firstname': name, | |
398 | 'lastname': lastname, |
|
398 | 'lastname': lastname, | |
399 | 'admin': True |
|
399 | 'admin': True | |
400 | }, |
|
400 | }, | |
401 | status=302 |
|
401 | status=302 | |
402 | ) # This should be overridden |
|
402 | ) # This should be overridden | |
403 |
|
403 | |||
404 | assert_session_flash( |
|
404 | assert_session_flash( | |
405 | response, 'You have successfully registered with RhodeCode. You can log-in now.') |
|
405 | response, 'You have successfully registered with RhodeCode. You can log-in now.') | |
406 |
|
406 | |||
407 | ret = Session().query(User).filter( |
|
407 | ret = Session().query(User).filter( | |
408 | User.username == 'test_regular4').one() |
|
408 | User.username == 'test_regular4').one() | |
409 | assert ret.username == username |
|
409 | assert ret.username == username | |
410 | assert check_password(password, ret.password) |
|
410 | assert check_password(password, ret.password) | |
411 | assert ret.email == email |
|
411 | assert ret.email == email | |
412 | assert ret.name == name |
|
412 | assert ret.name == name | |
413 | assert ret.lastname == lastname |
|
413 | assert ret.lastname == lastname | |
414 | assert ret.auth_tokens is not None |
|
414 | assert ret.auth_tokens is not None | |
415 | assert not ret.admin |
|
415 | assert not ret.admin | |
416 |
|
416 | |||
417 | def test_forgot_password_wrong_mail(self): |
|
417 | def test_forgot_password_wrong_mail(self): | |
418 | bad_email = 'marcin@wrongmail.org' |
|
418 | bad_email = 'marcin@wrongmail.org' | |
419 | # this initializes a session |
|
419 | # this initializes a session | |
420 | self.app.get(route_path('reset_password')) |
|
420 | self.app.get(route_path('reset_password')) | |
421 |
|
421 | |||
422 | response = self.app.post( |
|
422 | response = self.app.post( | |
423 | route_path('reset_password'), {'email': bad_email, } |
|
423 | route_path('reset_password'), {'email': bad_email, } | |
424 | ) |
|
424 | ) | |
425 | assert_session_flash(response, |
|
425 | assert_session_flash(response, | |
426 | 'If such email exists, a password reset link was sent to it.') |
|
426 | 'If such email exists, a password reset link was sent to it.') | |
427 |
|
427 | |||
428 | def test_forgot_password(self, user_util): |
|
428 | def test_forgot_password(self, user_util): | |
429 | # this initializes a session |
|
429 | # this initializes a session | |
430 | self.app.get(route_path('reset_password')) |
|
430 | self.app.get(route_path('reset_password')) | |
431 |
|
431 | |||
432 | user = user_util.create_user() |
|
432 | user = user_util.create_user() | |
433 | user_id = user.user_id |
|
433 | user_id = user.user_id | |
434 | email = user.email |
|
434 | email = user.email | |
435 |
|
435 | |||
436 | response = self.app.post(route_path('reset_password'), {'email': email, }) |
|
436 | response = self.app.post(route_path('reset_password'), {'email': email, }) | |
437 |
|
437 | |||
438 | assert_session_flash(response, |
|
438 | assert_session_flash(response, | |
439 | 'If such email exists, a password reset link was sent to it.') |
|
439 | 'If such email exists, a password reset link was sent to it.') | |
440 |
|
440 | |||
441 | # BAD KEY |
|
441 | # BAD KEY | |
442 | confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), 'badkey') |
|
442 | confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), 'badkey') | |
443 | response = self.app.get(confirm_url, status=302) |
|
443 | response = self.app.get(confirm_url, status=302) | |
444 | assert response.location.endswith(route_path('reset_password')) |
|
444 | assert response.location.endswith(route_path('reset_password')) | |
445 | assert_session_flash(response, 'Given reset token is invalid') |
|
445 | assert_session_flash(response, 'Given reset token is invalid') | |
446 |
|
446 | |||
447 | response.follow() # cleanup flash |
|
447 | response.follow() # cleanup flash | |
448 |
|
448 | |||
449 | # GOOD KEY |
|
449 | # GOOD KEY | |
450 | key = UserApiKeys.query()\ |
|
450 | key = UserApiKeys.query()\ | |
451 | .filter(UserApiKeys.user_id == user_id)\ |
|
451 | .filter(UserApiKeys.user_id == user_id)\ | |
452 | .filter(UserApiKeys.role == UserApiKeys.ROLE_PASSWORD_RESET)\ |
|
452 | .filter(UserApiKeys.role == UserApiKeys.ROLE_PASSWORD_RESET)\ | |
453 | .first() |
|
453 | .first() | |
454 |
|
454 | |||
455 | assert key |
|
455 | assert key | |
456 |
|
456 | |||
457 | confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), key.api_key) |
|
457 | confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), key.api_key) | |
458 | response = self.app.get(confirm_url) |
|
458 | response = self.app.get(confirm_url) | |
459 | assert response.status == '302 Found' |
|
459 | assert response.status == '302 Found' | |
460 | assert response.location.endswith(route_path('login')) |
|
460 | assert response.location.endswith(route_path('login')) | |
461 |
|
461 | |||
462 | assert_session_flash( |
|
462 | assert_session_flash( | |
463 | response, |
|
463 | response, | |
464 | 'Your password reset was successful, ' |
|
464 | 'Your password reset was successful, ' | |
465 | 'a new password has been sent to your email') |
|
465 | 'a new password has been sent to your email') | |
466 |
|
466 | |||
467 | response.follow() |
|
467 | response.follow() | |
468 |
|
468 | |||
469 | def _get_api_whitelist(self, values=None): |
|
469 | def _get_api_whitelist(self, values=None): | |
470 | config = {'api_access_controllers_whitelist': values or []} |
|
470 | config = {'api_access_controllers_whitelist': values or []} | |
471 | return config |
|
471 | return config | |
472 |
|
472 | |||
473 | @pytest.mark.parametrize("test_name, auth_token", [ |
|
473 | @pytest.mark.parametrize("test_name, auth_token", [ | |
474 | ('none', None), |
|
474 | ('none', None), | |
475 | ('empty_string', ''), |
|
475 | ('empty_string', ''), | |
476 | ('fake_number', '123456'), |
|
476 | ('fake_number', '123456'), | |
477 | ('proper_auth_token', None) |
|
477 | ('proper_auth_token', None) | |
478 | ]) |
|
478 | ]) | |
479 | def test_access_not_whitelisted_page_via_auth_token( |
|
479 | def test_access_not_whitelisted_page_via_auth_token( | |
480 | self, test_name, auth_token, user_admin): |
|
480 | self, test_name, auth_token, user_admin): | |
481 |
|
481 | |||
482 | whitelist = self._get_api_whitelist([]) |
|
482 | whitelist = self._get_api_whitelist([]) | |
483 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
483 | with mock.patch.dict('rhodecode.CONFIG', whitelist): | |
484 | assert [] == whitelist['api_access_controllers_whitelist'] |
|
484 | assert [] == whitelist['api_access_controllers_whitelist'] | |
485 | if test_name == 'proper_auth_token': |
|
485 | if test_name == 'proper_auth_token': | |
486 | # use builtin if api_key is None |
|
486 | # use builtin if api_key is None | |
487 | auth_token = user_admin.api_key |
|
487 | auth_token = user_admin.api_key | |
488 |
|
488 | |||
489 | with fixture.anon_access(False): |
|
489 | with fixture.anon_access(False): | |
490 | self.app.get( |
|
490 | self.app.get( | |
491 | route_path('repo_commit_raw', |
|
491 | route_path('repo_commit_raw', | |
492 | repo_name=HG_REPO, commit_id='tip', |
|
492 | repo_name=HG_REPO, commit_id='tip', | |
493 | params=dict(api_key=auth_token)), |
|
493 | params=dict(api_key=auth_token)), | |
494 | status=302) |
|
494 | status=302) | |
495 |
|
495 | |||
496 | @pytest.mark.parametrize("test_name, auth_token, code", [ |
|
496 | @pytest.mark.parametrize("test_name, auth_token, code", [ | |
497 | ('none', None, 302), |
|
497 | ('none', None, 302), | |
498 | ('empty_string', '', 302), |
|
498 | ('empty_string', '', 302), | |
499 | ('fake_number', '123456', 302), |
|
499 | ('fake_number', '123456', 302), | |
500 | ('proper_auth_token', None, 200) |
|
500 | ('proper_auth_token', None, 200) | |
501 | ]) |
|
501 | ]) | |
502 | def test_access_whitelisted_page_via_auth_token( |
|
502 | def test_access_whitelisted_page_via_auth_token( | |
503 | self, test_name, auth_token, code, user_admin): |
|
503 | self, test_name, auth_token, code, user_admin): | |
504 |
|
504 | |||
505 | whitelist = self._get_api_whitelist(whitelist_view) |
|
505 | whitelist = self._get_api_whitelist(whitelist_view) | |
506 |
|
506 | |||
507 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
507 | with mock.patch.dict('rhodecode.CONFIG', whitelist): | |
508 | assert whitelist_view == whitelist['api_access_controllers_whitelist'] |
|
508 | assert whitelist_view == whitelist['api_access_controllers_whitelist'] | |
509 |
|
509 | |||
510 | if test_name == 'proper_auth_token': |
|
510 | if test_name == 'proper_auth_token': | |
511 | auth_token = user_admin.api_key |
|
511 | auth_token = user_admin.api_key | |
512 | assert auth_token |
|
512 | assert auth_token | |
513 |
|
513 | |||
514 | with fixture.anon_access(False): |
|
514 | with fixture.anon_access(False): | |
515 | self.app.get( |
|
515 | self.app.get( | |
516 | route_path('repo_commit_raw', |
|
516 | route_path('repo_commit_raw', | |
517 | repo_name=HG_REPO, commit_id='tip', |
|
517 | repo_name=HG_REPO, commit_id='tip', | |
518 | params=dict(api_key=auth_token)), |
|
518 | params=dict(api_key=auth_token)), | |
519 | status=code) |
|
519 | status=code) | |
520 |
|
520 | |||
521 | @pytest.mark.parametrize("test_name, auth_token, code", [ |
|
521 | @pytest.mark.parametrize("test_name, auth_token, code", [ | |
522 | ('proper_auth_token', None, 200), |
|
522 | ('proper_auth_token', None, 200), | |
523 | ('wrong_auth_token', '123456', 302), |
|
523 | ('wrong_auth_token', '123456', 302), | |
524 | ]) |
|
524 | ]) | |
525 | def test_access_whitelisted_page_via_auth_token_bound_to_token( |
|
525 | def test_access_whitelisted_page_via_auth_token_bound_to_token( | |
526 | self, test_name, auth_token, code, user_admin): |
|
526 | self, test_name, auth_token, code, user_admin): | |
527 |
|
527 | |||
528 | expected_token = auth_token |
|
528 | expected_token = auth_token | |
529 | if test_name == 'proper_auth_token': |
|
529 | if test_name == 'proper_auth_token': | |
530 | auth_token = user_admin.api_key |
|
530 | auth_token = user_admin.api_key | |
531 | expected_token = auth_token |
|
531 | expected_token = auth_token | |
532 | assert auth_token |
|
532 | assert auth_token | |
533 |
|
533 | |||
534 | whitelist = self._get_api_whitelist([ |
|
534 | whitelist = self._get_api_whitelist([ | |
535 | 'RepoCommitsView:repo_commit_raw@{}'.format(expected_token)]) |
|
535 | 'RepoCommitsView:repo_commit_raw@{}'.format(expected_token)]) | |
536 |
|
536 | |||
537 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
537 | with mock.patch.dict('rhodecode.CONFIG', whitelist): | |
538 |
|
538 | |||
539 | with fixture.anon_access(False): |
|
539 | with fixture.anon_access(False): | |
540 | self.app.get( |
|
540 | self.app.get( | |
541 | route_path('repo_commit_raw', |
|
541 | route_path('repo_commit_raw', | |
542 | repo_name=HG_REPO, commit_id='tip', |
|
542 | repo_name=HG_REPO, commit_id='tip', | |
543 | params=dict(api_key=auth_token)), |
|
543 | params=dict(api_key=auth_token)), | |
544 | status=code) |
|
544 | status=code) | |
545 |
|
545 | |||
546 | def test_access_page_via_extra_auth_token(self): |
|
546 | def test_access_page_via_extra_auth_token(self): | |
547 | whitelist = self._get_api_whitelist(whitelist_view) |
|
547 | whitelist = self._get_api_whitelist(whitelist_view) | |
548 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
548 | with mock.patch.dict('rhodecode.CONFIG', whitelist): | |
549 | assert whitelist_view == \ |
|
549 | assert whitelist_view == \ | |
550 | whitelist['api_access_controllers_whitelist'] |
|
550 | whitelist['api_access_controllers_whitelist'] | |
551 |
|
551 | |||
552 | new_auth_token = AuthTokenModel().create( |
|
552 | new_auth_token = AuthTokenModel().create( | |
553 | TEST_USER_ADMIN_LOGIN, 'test') |
|
553 | TEST_USER_ADMIN_LOGIN, 'test') | |
554 | Session().commit() |
|
554 | Session().commit() | |
555 | with fixture.anon_access(False): |
|
555 | with fixture.anon_access(False): | |
556 | self.app.get( |
|
556 | self.app.get( | |
557 | route_path('repo_commit_raw', |
|
557 | route_path('repo_commit_raw', | |
558 | repo_name=HG_REPO, commit_id='tip', |
|
558 | repo_name=HG_REPO, commit_id='tip', | |
559 | params=dict(api_key=new_auth_token.api_key)), |
|
559 | params=dict(api_key=new_auth_token.api_key)), | |
560 | status=200) |
|
560 | status=200) | |
561 |
|
561 | |||
562 | def test_access_page_via_expired_auth_token(self): |
|
562 | def test_access_page_via_expired_auth_token(self): | |
563 | whitelist = self._get_api_whitelist(whitelist_view) |
|
563 | whitelist = self._get_api_whitelist(whitelist_view) | |
564 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
564 | with mock.patch.dict('rhodecode.CONFIG', whitelist): | |
565 | assert whitelist_view == \ |
|
565 | assert whitelist_view == \ | |
566 | whitelist['api_access_controllers_whitelist'] |
|
566 | whitelist['api_access_controllers_whitelist'] | |
567 |
|
567 | |||
568 | new_auth_token = AuthTokenModel().create( |
|
568 | new_auth_token = AuthTokenModel().create( | |
569 | TEST_USER_ADMIN_LOGIN, 'test') |
|
569 | TEST_USER_ADMIN_LOGIN, 'test') | |
570 | Session().commit() |
|
570 | Session().commit() | |
571 | # patch the api key and make it expired |
|
571 | # patch the api key and make it expired | |
572 | new_auth_token.expires = 0 |
|
572 | new_auth_token.expires = 0 | |
573 | Session().add(new_auth_token) |
|
573 | Session().add(new_auth_token) | |
574 | Session().commit() |
|
574 | Session().commit() | |
575 | with fixture.anon_access(False): |
|
575 | with fixture.anon_access(False): | |
576 | self.app.get( |
|
576 | self.app.get( | |
577 | route_path('repo_commit_raw', |
|
577 | route_path('repo_commit_raw', | |
578 | repo_name=HG_REPO, commit_id='tip', |
|
578 | repo_name=HG_REPO, commit_id='tip', | |
579 | params=dict(api_key=new_auth_token.api_key)), |
|
579 | params=dict(api_key=new_auth_token.api_key)), | |
580 | status=302) |
|
580 | status=302) |
@@ -1,470 +1,470 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import time |
|
21 | import time | |
22 | import collections |
|
22 | import collections | |
23 | import datetime |
|
23 | import datetime | |
24 | import formencode |
|
24 | import formencode | |
25 | import formencode.htmlfill |
|
25 | import formencode.htmlfill | |
26 | import logging |
|
26 | import logging | |
27 | import urlparse |
|
27 | import urllib.parse | |
28 | import requests |
|
28 | import requests | |
29 |
|
29 | |||
30 | from pyramid.httpexceptions import HTTPFound |
|
30 | from pyramid.httpexceptions import HTTPFound | |
31 |
|
31 | |||
32 |
|
32 | |||
33 | from rhodecode.apps._base import BaseAppView |
|
33 | from rhodecode.apps._base import BaseAppView | |
34 | from rhodecode.authentication.base import authenticate, HTTP_TYPE |
|
34 | from rhodecode.authentication.base import authenticate, HTTP_TYPE | |
35 | from rhodecode.authentication.plugins import auth_rhodecode |
|
35 | from rhodecode.authentication.plugins import auth_rhodecode | |
36 | from rhodecode.events import UserRegistered, trigger |
|
36 | from rhodecode.events import UserRegistered, trigger | |
37 | from rhodecode.lib import helpers as h |
|
37 | from rhodecode.lib import helpers as h | |
38 | from rhodecode.lib import audit_logger |
|
38 | from rhodecode.lib import audit_logger | |
39 | from rhodecode.lib.auth import ( |
|
39 | from rhodecode.lib.auth import ( | |
40 | AuthUser, HasPermissionAnyDecorator, CSRFRequired) |
|
40 | AuthUser, HasPermissionAnyDecorator, CSRFRequired) | |
41 | from rhodecode.lib.base import get_ip_addr |
|
41 | from rhodecode.lib.base import get_ip_addr | |
42 | from rhodecode.lib.exceptions import UserCreationError |
|
42 | from rhodecode.lib.exceptions import UserCreationError | |
43 | from rhodecode.lib.utils2 import safe_str |
|
43 | from rhodecode.lib.utils2 import safe_str | |
44 | from rhodecode.model.db import User, UserApiKeys |
|
44 | from rhodecode.model.db import User, UserApiKeys | |
45 | from rhodecode.model.forms import LoginForm, RegisterForm, PasswordResetForm |
|
45 | from rhodecode.model.forms import LoginForm, RegisterForm, PasswordResetForm | |
46 | from rhodecode.model.meta import Session |
|
46 | from rhodecode.model.meta import Session | |
47 | from rhodecode.model.auth_token import AuthTokenModel |
|
47 | from rhodecode.model.auth_token import AuthTokenModel | |
48 | from rhodecode.model.settings import SettingsModel |
|
48 | from rhodecode.model.settings import SettingsModel | |
49 | from rhodecode.model.user import UserModel |
|
49 | from rhodecode.model.user import UserModel | |
50 | from rhodecode.translation import _ |
|
50 | from rhodecode.translation import _ | |
51 |
|
51 | |||
52 |
|
52 | |||
53 | log = logging.getLogger(__name__) |
|
53 | log = logging.getLogger(__name__) | |
54 |
|
54 | |||
55 | CaptchaData = collections.namedtuple( |
|
55 | CaptchaData = collections.namedtuple( | |
56 | 'CaptchaData', 'active, private_key, public_key') |
|
56 | 'CaptchaData', 'active, private_key, public_key') | |
57 |
|
57 | |||
58 |
|
58 | |||
59 | def store_user_in_session(session, username, remember=False): |
|
59 | def store_user_in_session(session, username, remember=False): | |
60 | user = User.get_by_username(username, case_insensitive=True) |
|
60 | user = User.get_by_username(username, case_insensitive=True) | |
61 | auth_user = AuthUser(user.user_id) |
|
61 | auth_user = AuthUser(user.user_id) | |
62 | auth_user.set_authenticated() |
|
62 | auth_user.set_authenticated() | |
63 | cs = auth_user.get_cookie_store() |
|
63 | cs = auth_user.get_cookie_store() | |
64 | session['rhodecode_user'] = cs |
|
64 | session['rhodecode_user'] = cs | |
65 | user.update_lastlogin() |
|
65 | user.update_lastlogin() | |
66 | Session().commit() |
|
66 | Session().commit() | |
67 |
|
67 | |||
68 | # If they want to be remembered, update the cookie |
|
68 | # If they want to be remembered, update the cookie | |
69 | if remember: |
|
69 | if remember: | |
70 | _year = (datetime.datetime.now() + |
|
70 | _year = (datetime.datetime.now() + | |
71 | datetime.timedelta(seconds=60 * 60 * 24 * 365)) |
|
71 | datetime.timedelta(seconds=60 * 60 * 24 * 365)) | |
72 | session._set_cookie_expires(_year) |
|
72 | session._set_cookie_expires(_year) | |
73 |
|
73 | |||
74 | session.save() |
|
74 | session.save() | |
75 |
|
75 | |||
76 | safe_cs = cs.copy() |
|
76 | safe_cs = cs.copy() | |
77 | safe_cs['password'] = '****' |
|
77 | safe_cs['password'] = '****' | |
78 | log.info('user %s is now authenticated and stored in ' |
|
78 | log.info('user %s is now authenticated and stored in ' | |
79 | 'session, session attrs %s', username, safe_cs) |
|
79 | 'session, session attrs %s', username, safe_cs) | |
80 |
|
80 | |||
81 | # dumps session attrs back to cookie |
|
81 | # dumps session attrs back to cookie | |
82 | session._update_cookie_out() |
|
82 | session._update_cookie_out() | |
83 | # we set new cookie |
|
83 | # we set new cookie | |
84 | headers = None |
|
84 | headers = None | |
85 | if session.request['set_cookie']: |
|
85 | if session.request['set_cookie']: | |
86 | # send set-cookie headers back to response to update cookie |
|
86 | # send set-cookie headers back to response to update cookie | |
87 | headers = [('Set-Cookie', session.request['cookie_out'])] |
|
87 | headers = [('Set-Cookie', session.request['cookie_out'])] | |
88 | return headers |
|
88 | return headers | |
89 |
|
89 | |||
90 |
|
90 | |||
91 | def get_came_from(request): |
|
91 | def get_came_from(request): | |
92 | came_from = safe_str(request.GET.get('came_from', '')) |
|
92 | came_from = safe_str(request.GET.get('came_from', '')) | |
93 | parsed = urlparse.urlparse(came_from) |
|
93 | parsed = urllib.parse.urlparse(came_from) | |
94 | allowed_schemes = ['http', 'https'] |
|
94 | allowed_schemes = ['http', 'https'] | |
95 | default_came_from = h.route_path('home') |
|
95 | default_came_from = h.route_path('home') | |
96 | if parsed.scheme and parsed.scheme not in allowed_schemes: |
|
96 | if parsed.scheme and parsed.scheme not in allowed_schemes: | |
97 | log.error('Suspicious URL scheme detected %s for url %s', |
|
97 | log.error('Suspicious URL scheme detected %s for url %s', | |
98 | parsed.scheme, parsed) |
|
98 | parsed.scheme, parsed) | |
99 | came_from = default_came_from |
|
99 | came_from = default_came_from | |
100 | elif parsed.netloc and request.host != parsed.netloc: |
|
100 | elif parsed.netloc and request.host != parsed.netloc: | |
101 | log.error('Suspicious NETLOC detected %s for url %s server url ' |
|
101 | log.error('Suspicious NETLOC detected %s for url %s server url ' | |
102 | 'is: %s', parsed.netloc, parsed, request.host) |
|
102 | 'is: %s', parsed.netloc, parsed, request.host) | |
103 | came_from = default_came_from |
|
103 | came_from = default_came_from | |
104 | elif any(bad_str in parsed.path for bad_str in ('\r', '\n')): |
|
104 | elif any(bad_str in parsed.path for bad_str in ('\r', '\n')): | |
105 | log.error('Header injection detected `%s` for url %s server url ', |
|
105 | log.error('Header injection detected `%s` for url %s server url ', | |
106 | parsed.path, parsed) |
|
106 | parsed.path, parsed) | |
107 | came_from = default_came_from |
|
107 | came_from = default_came_from | |
108 |
|
108 | |||
109 | return came_from or default_came_from |
|
109 | return came_from or default_came_from | |
110 |
|
110 | |||
111 |
|
111 | |||
112 | class LoginView(BaseAppView): |
|
112 | class LoginView(BaseAppView): | |
113 |
|
113 | |||
114 | def load_default_context(self): |
|
114 | def load_default_context(self): | |
115 | c = self._get_local_tmpl_context() |
|
115 | c = self._get_local_tmpl_context() | |
116 | c.came_from = get_came_from(self.request) |
|
116 | c.came_from = get_came_from(self.request) | |
117 | return c |
|
117 | return c | |
118 |
|
118 | |||
119 | def _get_captcha_data(self): |
|
119 | def _get_captcha_data(self): | |
120 | settings = SettingsModel().get_all_settings() |
|
120 | settings = SettingsModel().get_all_settings() | |
121 | private_key = settings.get('rhodecode_captcha_private_key') |
|
121 | private_key = settings.get('rhodecode_captcha_private_key') | |
122 | public_key = settings.get('rhodecode_captcha_public_key') |
|
122 | public_key = settings.get('rhodecode_captcha_public_key') | |
123 | active = bool(private_key) |
|
123 | active = bool(private_key) | |
124 | return CaptchaData( |
|
124 | return CaptchaData( | |
125 | active=active, private_key=private_key, public_key=public_key) |
|
125 | active=active, private_key=private_key, public_key=public_key) | |
126 |
|
126 | |||
127 | def validate_captcha(self, private_key): |
|
127 | def validate_captcha(self, private_key): | |
128 |
|
128 | |||
129 | captcha_rs = self.request.POST.get('g-recaptcha-response') |
|
129 | captcha_rs = self.request.POST.get('g-recaptcha-response') | |
130 | url = "https://www.google.com/recaptcha/api/siteverify" |
|
130 | url = "https://www.google.com/recaptcha/api/siteverify" | |
131 | params = { |
|
131 | params = { | |
132 | 'secret': private_key, |
|
132 | 'secret': private_key, | |
133 | 'response': captcha_rs, |
|
133 | 'response': captcha_rs, | |
134 | 'remoteip': get_ip_addr(self.request.environ) |
|
134 | 'remoteip': get_ip_addr(self.request.environ) | |
135 | } |
|
135 | } | |
136 | verify_rs = requests.get(url, params=params, verify=True, timeout=60) |
|
136 | verify_rs = requests.get(url, params=params, verify=True, timeout=60) | |
137 | verify_rs = verify_rs.json() |
|
137 | verify_rs = verify_rs.json() | |
138 | captcha_status = verify_rs.get('success', False) |
|
138 | captcha_status = verify_rs.get('success', False) | |
139 | captcha_errors = verify_rs.get('error-codes', []) |
|
139 | captcha_errors = verify_rs.get('error-codes', []) | |
140 | if not isinstance(captcha_errors, list): |
|
140 | if not isinstance(captcha_errors, list): | |
141 | captcha_errors = [captcha_errors] |
|
141 | captcha_errors = [captcha_errors] | |
142 | captcha_errors = ', '.join(captcha_errors) |
|
142 | captcha_errors = ', '.join(captcha_errors) | |
143 | captcha_message = '' |
|
143 | captcha_message = '' | |
144 | if captcha_status is False: |
|
144 | if captcha_status is False: | |
145 | captcha_message = "Bad captcha. Errors: {}".format( |
|
145 | captcha_message = "Bad captcha. Errors: {}".format( | |
146 | captcha_errors) |
|
146 | captcha_errors) | |
147 |
|
147 | |||
148 | return captcha_status, captcha_message |
|
148 | return captcha_status, captcha_message | |
149 |
|
149 | |||
150 | def login(self): |
|
150 | def login(self): | |
151 | c = self.load_default_context() |
|
151 | c = self.load_default_context() | |
152 | auth_user = self._rhodecode_user |
|
152 | auth_user = self._rhodecode_user | |
153 |
|
153 | |||
154 | # redirect if already logged in |
|
154 | # redirect if already logged in | |
155 | if (auth_user.is_authenticated and |
|
155 | if (auth_user.is_authenticated and | |
156 | not auth_user.is_default and auth_user.ip_allowed): |
|
156 | not auth_user.is_default and auth_user.ip_allowed): | |
157 | raise HTTPFound(c.came_from) |
|
157 | raise HTTPFound(c.came_from) | |
158 |
|
158 | |||
159 | # check if we use the headers plugin, and try to log in using it. 

159 | # check if we use the headers plugin, and try to log in using it. | 
160 | try: |
|
160 | try: | |
161 | log.debug('Running PRE-AUTH for headers based authentication') |
|
161 | log.debug('Running PRE-AUTH for headers based authentication') | |
162 | auth_info = authenticate( |
|
162 | auth_info = authenticate( | |
163 | '', '', self.request.environ, HTTP_TYPE, skip_missing=True) |
|
163 | '', '', self.request.environ, HTTP_TYPE, skip_missing=True) | |
164 | if auth_info: |
|
164 | if auth_info: | |
165 | headers = store_user_in_session( |
|
165 | headers = store_user_in_session( | |
166 | self.session, auth_info.get('username')) |
|
166 | self.session, auth_info.get('username')) | |
167 | raise HTTPFound(c.came_from, headers=headers) |
|
167 | raise HTTPFound(c.came_from, headers=headers) | |
168 | except UserCreationError as e: |
|
168 | except UserCreationError as e: | |
169 | log.error(e) |
|
169 | log.error(e) | |
170 | h.flash(e, category='error') |
|
170 | h.flash(e, category='error') | |
171 |
|
171 | |||
172 | return self._get_template_context(c) |
|
172 | return self._get_template_context(c) | |
173 |
|
173 | |||
174 | def login_post(self): |
|
174 | def login_post(self): | |
175 | c = self.load_default_context() |
|
175 | c = self.load_default_context() | |
176 |
|
176 | |||
177 | login_form = LoginForm(self.request.translate)() |
|
177 | login_form = LoginForm(self.request.translate)() | |
178 |
|
178 | |||
179 | try: |
|
179 | try: | |
180 | self.session.invalidate() |
|
180 | self.session.invalidate() | |
181 | form_result = login_form.to_python(self.request.POST) |
|
181 | form_result = login_form.to_python(self.request.POST) | |
182 | # form checks for username/password, now we're authenticated |
|
182 | # form checks for username/password, now we're authenticated | |
183 | headers = store_user_in_session( |
|
183 | headers = store_user_in_session( | |
184 | self.session, |
|
184 | self.session, | |
185 | username=form_result['username'], |
|
185 | username=form_result['username'], | |
186 | remember=form_result['remember']) |
|
186 | remember=form_result['remember']) | |
187 | log.debug('Redirecting to "%s" after login.', c.came_from) |
|
187 | log.debug('Redirecting to "%s" after login.', c.came_from) | |
188 |
|
188 | |||
189 | audit_user = audit_logger.UserWrap( |
|
189 | audit_user = audit_logger.UserWrap( | |
190 | username=self.request.POST.get('username'), |
|
190 | username=self.request.POST.get('username'), | |
191 | ip_addr=self.request.remote_addr) |
|
191 | ip_addr=self.request.remote_addr) | |
192 | action_data = {'user_agent': self.request.user_agent} |
|
192 | action_data = {'user_agent': self.request.user_agent} | |
193 | audit_logger.store_web( |
|
193 | audit_logger.store_web( | |
194 | 'user.login.success', action_data=action_data, |
|
194 | 'user.login.success', action_data=action_data, | |
195 | user=audit_user, commit=True) |
|
195 | user=audit_user, commit=True) | |
196 |
|
196 | |||
197 | raise HTTPFound(c.came_from, headers=headers) |
|
197 | raise HTTPFound(c.came_from, headers=headers) | |
198 | except formencode.Invalid as errors: |
|
198 | except formencode.Invalid as errors: | |
199 | defaults = errors.value |
|
199 | defaults = errors.value | |
200 | # remove password from filling in form again |
|
200 | # remove password from filling in form again | |
201 | defaults.pop('password', None) |
|
201 | defaults.pop('password', None) | |
202 | render_ctx = { |
|
202 | render_ctx = { | |
203 | 'errors': errors.error_dict, |
|
203 | 'errors': errors.error_dict, | |
204 | 'defaults': defaults, |
|
204 | 'defaults': defaults, | |
205 | } |
|
205 | } | |
206 |
|
206 | |||
207 | audit_user = audit_logger.UserWrap( |
|
207 | audit_user = audit_logger.UserWrap( | |
208 | username=self.request.POST.get('username'), |
|
208 | username=self.request.POST.get('username'), | |
209 | ip_addr=self.request.remote_addr) |
|
209 | ip_addr=self.request.remote_addr) | |
210 | action_data = {'user_agent': self.request.user_agent} |
|
210 | action_data = {'user_agent': self.request.user_agent} | |
211 | audit_logger.store_web( |
|
211 | audit_logger.store_web( | |
212 | 'user.login.failure', action_data=action_data, |
|
212 | 'user.login.failure', action_data=action_data, | |
213 | user=audit_user, commit=True) |
|
213 | user=audit_user, commit=True) | |
214 | return self._get_template_context(c, **render_ctx) |
|
214 | return self._get_template_context(c, **render_ctx) | |
215 |
|
215 | |||
216 | except UserCreationError as e: |
|
216 | except UserCreationError as e: | |
217 | # headers auth or other auth functions that create users on 

217 | # headers auth or other auth functions that create users on | 
218 | # the fly can throw this exception signaling that there's an issue 

218 | # the fly can throw this exception signaling that there's an issue | 
219 | # with user creation; an explanation should be provided in 

219 | # with user creation; an explanation should be provided in | 
220 | # the exception itself 

220 | # the exception itself | 
221 | h.flash(e, category='error') |
|
221 | h.flash(e, category='error') | |
222 | return self._get_template_context(c) |
|
222 | return self._get_template_context(c) | |
223 |
|
223 | |||
224 | @CSRFRequired() |
|
224 | @CSRFRequired() | |
225 | def logout(self): |
|
225 | def logout(self): | |
226 | auth_user = self._rhodecode_user |
|
226 | auth_user = self._rhodecode_user | |
227 | log.info('Deleting session for user: `%s`', auth_user) |
|
227 | log.info('Deleting session for user: `%s`', auth_user) | |
228 |
|
228 | |||
229 | action_data = {'user_agent': self.request.user_agent} |
|
229 | action_data = {'user_agent': self.request.user_agent} | |
230 | audit_logger.store_web( |
|
230 | audit_logger.store_web( | |
231 | 'user.logout', action_data=action_data, |
|
231 | 'user.logout', action_data=action_data, | |
232 | user=auth_user, commit=True) |
|
232 | user=auth_user, commit=True) | |
233 | self.session.delete() |
|
233 | self.session.delete() | |
234 | return HTTPFound(h.route_path('home')) |
|
234 | return HTTPFound(h.route_path('home')) | |
235 |
|
235 | |||
236 | @HasPermissionAnyDecorator( |
|
236 | @HasPermissionAnyDecorator( | |
237 | 'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate') |
|
237 | 'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate') | |
238 | def register(self, defaults=None, errors=None): |
|
238 | def register(self, defaults=None, errors=None): | |
239 | c = self.load_default_context() |
|
239 | c = self.load_default_context() | |
240 | defaults = defaults or {} |
|
240 | defaults = defaults or {} | |
241 | errors = errors or {} |
|
241 | errors = errors or {} | |
242 |
|
242 | |||
243 | settings = SettingsModel().get_all_settings() |
|
243 | settings = SettingsModel().get_all_settings() | |
244 | register_message = settings.get('rhodecode_register_message') or '' |
|
244 | register_message = settings.get('rhodecode_register_message') or '' | |
245 | captcha = self._get_captcha_data() |
|
245 | captcha = self._get_captcha_data() | |
246 | auto_active = 'hg.register.auto_activate' in User.get_default_user()\ |
|
246 | auto_active = 'hg.register.auto_activate' in User.get_default_user()\ | |
247 | .AuthUser().permissions['global'] |
|
247 | .AuthUser().permissions['global'] | |
248 |
|
248 | |||
249 | render_ctx = self._get_template_context(c) |
|
249 | render_ctx = self._get_template_context(c) | |
250 | render_ctx.update({ |
|
250 | render_ctx.update({ | |
251 | 'defaults': defaults, |
|
251 | 'defaults': defaults, | |
252 | 'errors': errors, |
|
252 | 'errors': errors, | |
253 | 'auto_active': auto_active, |
|
253 | 'auto_active': auto_active, | |
254 | 'captcha_active': captcha.active, |
|
254 | 'captcha_active': captcha.active, | |
255 | 'captcha_public_key': captcha.public_key, |
|
255 | 'captcha_public_key': captcha.public_key, | |
256 | 'register_message': register_message, |
|
256 | 'register_message': register_message, | |
257 | }) |
|
257 | }) | |
258 | return render_ctx |
|
258 | return render_ctx | |
259 |
|
259 | |||
260 | @HasPermissionAnyDecorator( |
|
260 | @HasPermissionAnyDecorator( | |
261 | 'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate') |
|
261 | 'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate') | |
262 | def register_post(self): |
|
262 | def register_post(self): | |
263 | from rhodecode.authentication.plugins import auth_rhodecode |
|
263 | from rhodecode.authentication.plugins import auth_rhodecode | |
264 |
|
264 | |||
265 | self.load_default_context() |
|
265 | self.load_default_context() | |
266 | captcha = self._get_captcha_data() |
|
266 | captcha = self._get_captcha_data() | |
267 | auto_active = 'hg.register.auto_activate' in User.get_default_user()\ |
|
267 | auto_active = 'hg.register.auto_activate' in User.get_default_user()\ | |
268 | .AuthUser().permissions['global'] |
|
268 | .AuthUser().permissions['global'] | |
269 |
|
269 | |||
270 | extern_name = auth_rhodecode.RhodeCodeAuthPlugin.uid |
|
270 | extern_name = auth_rhodecode.RhodeCodeAuthPlugin.uid | |
271 | extern_type = auth_rhodecode.RhodeCodeAuthPlugin.uid |
|
271 | extern_type = auth_rhodecode.RhodeCodeAuthPlugin.uid | |
272 |
|
272 | |||
273 | register_form = RegisterForm(self.request.translate)() |
|
273 | register_form = RegisterForm(self.request.translate)() | |
274 | try: |
|
274 | try: | |
275 |
|
275 | |||
276 | form_result = register_form.to_python(self.request.POST) |
|
276 | form_result = register_form.to_python(self.request.POST) | |
277 | form_result['active'] = auto_active |
|
277 | form_result['active'] = auto_active | |
278 | external_identity = self.request.POST.get('external_identity') |
|
278 | external_identity = self.request.POST.get('external_identity') | |
279 |
|
279 | |||
280 | if external_identity: |
|
280 | if external_identity: | |
281 | extern_name = external_identity |
|
281 | extern_name = external_identity | |
282 | extern_type = external_identity |
|
282 | extern_type = external_identity | |
283 |
|
283 | |||
284 | if captcha.active: |
|
284 | if captcha.active: | |
285 | captcha_status, captcha_message = self.validate_captcha( |
|
285 | captcha_status, captcha_message = self.validate_captcha( | |
286 | captcha.private_key) |
|
286 | captcha.private_key) | |
287 |
|
287 | |||
288 | if not captcha_status: |
|
288 | if not captcha_status: | |
289 | _value = form_result |
|
289 | _value = form_result | |
290 | _msg = _('Bad captcha') |
|
290 | _msg = _('Bad captcha') | |
291 | error_dict = {'recaptcha_field': captcha_message} |
|
291 | error_dict = {'recaptcha_field': captcha_message} | |
292 | raise formencode.Invalid( |
|
292 | raise formencode.Invalid( | |
293 | _msg, _value, None, error_dict=error_dict) |
|
293 | _msg, _value, None, error_dict=error_dict) | |
294 |
|
294 | |||
295 | new_user = UserModel().create_registration( |
|
295 | new_user = UserModel().create_registration( | |
296 | form_result, extern_name=extern_name, extern_type=extern_type) |
|
296 | form_result, extern_name=extern_name, extern_type=extern_type) | |
297 |
|
297 | |||
298 | action_data = {'data': new_user.get_api_data(), |
|
298 | action_data = {'data': new_user.get_api_data(), | |
299 | 'user_agent': self.request.user_agent} |
|
299 | 'user_agent': self.request.user_agent} | |
300 |
|
300 | |||
301 | if external_identity: |
|
301 | if external_identity: | |
302 | action_data['external_identity'] = external_identity |
|
302 | action_data['external_identity'] = external_identity | |
303 |
|
303 | |||
304 | audit_user = audit_logger.UserWrap( |
|
304 | audit_user = audit_logger.UserWrap( | |
305 | username=new_user.username, |
|
305 | username=new_user.username, | |
306 | user_id=new_user.user_id, |
|
306 | user_id=new_user.user_id, | |
307 | ip_addr=self.request.remote_addr) |
|
307 | ip_addr=self.request.remote_addr) | |
308 |
|
308 | |||
309 | audit_logger.store_web( |
|
309 | audit_logger.store_web( | |
310 | 'user.register', action_data=action_data, |
|
310 | 'user.register', action_data=action_data, | |
311 | user=audit_user) |
|
311 | user=audit_user) | |
312 |
|
312 | |||
313 | event = UserRegistered(user=new_user, session=self.session) |
|
313 | event = UserRegistered(user=new_user, session=self.session) | |
314 | trigger(event) |
|
314 | trigger(event) | |
315 | h.flash( |
|
315 | h.flash( | |
316 | _('You have successfully registered with RhodeCode. You can log-in now.'), |
|
316 | _('You have successfully registered with RhodeCode. You can log-in now.'), | |
317 | category='success') |
|
317 | category='success') | |
318 | if external_identity: |
|
318 | if external_identity: | |
319 | h.flash( |
|
319 | h.flash( | |
320 | _('Please use the {identity} button to log-in').format( |
|
320 | _('Please use the {identity} button to log-in').format( | |
321 | identity=external_identity), |
|
321 | identity=external_identity), | |
322 | category='success') |
|
322 | category='success') | |
323 | Session().commit() |
|
323 | Session().commit() | |
324 |
|
324 | |||
325 | redirect_ro = self.request.route_path('login') |
|
325 | redirect_ro = self.request.route_path('login') | |
326 | raise HTTPFound(redirect_ro) |
|
326 | raise HTTPFound(redirect_ro) | |
327 |
|
327 | |||
328 | except formencode.Invalid as errors: |
|
328 | except formencode.Invalid as errors: | |
329 | errors.value.pop('password', None) |
|
329 | errors.value.pop('password', None) | |
330 | errors.value.pop('password_confirmation', None) |
|
330 | errors.value.pop('password_confirmation', None) | |
331 | return self.register( |
|
331 | return self.register( | |
332 | defaults=errors.value, errors=errors.error_dict) |
|
332 | defaults=errors.value, errors=errors.error_dict) | |
333 |
|
333 | |||
334 | except UserCreationError as e: |
|
334 | except UserCreationError as e: | |
335 | # container auth or other auth functions that create users on 

335 | # container auth or other auth functions that create users on | 
336 | # the fly can throw this exception signaling that there's an issue 

336 | # the fly can throw this exception signaling that there's an issue | 
337 | # with user creation; an explanation should be provided in 

337 | # with user creation; an explanation should be provided in | 
338 | # the exception itself 

338 | # the exception itself | 
339 | h.flash(e, category='error') |
|
339 | h.flash(e, category='error') | |
340 | return self.register() |
|
340 | return self.register() | |
341 |
|
341 | |||
342 | def password_reset(self): |
|
342 | def password_reset(self): | |
343 | c = self.load_default_context() |
|
343 | c = self.load_default_context() | |
344 | captcha = self._get_captcha_data() |
|
344 | captcha = self._get_captcha_data() | |
345 |
|
345 | |||
346 | template_context = { |
|
346 | template_context = { | |
347 | 'captcha_active': captcha.active, |
|
347 | 'captcha_active': captcha.active, | |
348 | 'captcha_public_key': captcha.public_key, |
|
348 | 'captcha_public_key': captcha.public_key, | |
349 | 'defaults': {}, |
|
349 | 'defaults': {}, | |
350 | 'errors': {}, |
|
350 | 'errors': {}, | |
351 | } |
|
351 | } | |
352 |
|
352 | |||
353 | # always send an implicit message to prevent discovery of 

353 | # always send an implicit message to prevent discovery of | 
354 | # matching emails |
|
354 | # matching emails | |
355 | msg = _('If such email exists, a password reset link was sent to it.') |
|
355 | msg = _('If such email exists, a password reset link was sent to it.') | |
356 |
|
356 | |||
357 | def default_response(): |
|
357 | def default_response(): | |
358 | log.debug('faking response on invalid password reset') |
|
358 | log.debug('faking response on invalid password reset') | |
359 | # make this take 2s, to prevent brute forcing. |
|
359 | # make this take 2s, to prevent brute forcing. | |
360 | time.sleep(2) |
|
360 | time.sleep(2) | |
361 | h.flash(msg, category='success') |
|
361 | h.flash(msg, category='success') | |
362 | return HTTPFound(self.request.route_path('reset_password')) |
|
362 | return HTTPFound(self.request.route_path('reset_password')) | |
363 |
|
363 | |||
364 | if self.request.POST: |
|
364 | if self.request.POST: | |
365 | if h.HasPermissionAny('hg.password_reset.disabled')(): |
|
365 | if h.HasPermissionAny('hg.password_reset.disabled')(): | |
366 | _email = self.request.POST.get('email', '') |
|
366 | _email = self.request.POST.get('email', '') | |
367 | log.error('Failed attempt to reset password for `%s`.', _email) |
|
367 | log.error('Failed attempt to reset password for `%s`.', _email) | |
368 | h.flash(_('Password reset has been disabled.'), category='error') |
|
368 | h.flash(_('Password reset has been disabled.'), category='error') | |
369 | return HTTPFound(self.request.route_path('reset_password')) |
|
369 | return HTTPFound(self.request.route_path('reset_password')) | |
370 |
|
370 | |||
371 | password_reset_form = PasswordResetForm(self.request.translate)() |
|
371 | password_reset_form = PasswordResetForm(self.request.translate)() | |
372 | description = u'Generated token for password reset from {}'.format( |
|
372 | description = u'Generated token for password reset from {}'.format( | |
373 | datetime.datetime.now().isoformat()) |
|
373 | datetime.datetime.now().isoformat()) | |
374 |
|
374 | |||
375 | try: |
|
375 | try: | |
376 | form_result = password_reset_form.to_python( |
|
376 | form_result = password_reset_form.to_python( | |
377 | self.request.POST) |
|
377 | self.request.POST) | |
378 | user_email = form_result['email'] |
|
378 | user_email = form_result['email'] | |
379 |
|
379 | |||
380 | if captcha.active: |
|
380 | if captcha.active: | |
381 | captcha_status, captcha_message = self.validate_captcha( |
|
381 | captcha_status, captcha_message = self.validate_captcha( | |
382 | captcha.private_key) |
|
382 | captcha.private_key) | |
383 |
|
383 | |||
384 | if not captcha_status: |
|
384 | if not captcha_status: | |
385 | _value = form_result |
|
385 | _value = form_result | |
386 | _msg = _('Bad captcha') |
|
386 | _msg = _('Bad captcha') | |
387 | error_dict = {'recaptcha_field': captcha_message} |
|
387 | error_dict = {'recaptcha_field': captcha_message} | |
388 | raise formencode.Invalid( |
|
388 | raise formencode.Invalid( | |
389 | _msg, _value, None, error_dict=error_dict) |
|
389 | _msg, _value, None, error_dict=error_dict) | |
390 |
|
390 | |||
391 | # Generate reset URL and send mail. |
|
391 | # Generate reset URL and send mail. | |
392 | user = User.get_by_email(user_email) |
|
392 | user = User.get_by_email(user_email) | |
393 |
|
393 | |||
394 | # only allow rhodecode based users to reset their password |
|
394 | # only allow rhodecode based users to reset their password | |
395 | # external auth shouldn't allow password reset |
|
395 | # external auth shouldn't allow password reset | |
396 | if user and user.extern_type != auth_rhodecode.RhodeCodeAuthPlugin.uid: |
|
396 | if user and user.extern_type != auth_rhodecode.RhodeCodeAuthPlugin.uid: | |
397 | log.warning('User %s with external type `%s` tried a password reset. ' |
|
397 | log.warning('User %s with external type `%s` tried a password reset. ' | |
398 | 'This try was rejected', user, user.extern_type) |
|
398 | 'This try was rejected', user, user.extern_type) | |
399 | return default_response() |
|
399 | return default_response() | |
400 |
|
400 | |||
401 | # generate password reset token that expires in 10 minutes |
|
401 | # generate password reset token that expires in 10 minutes | |
402 | reset_token = UserModel().add_auth_token( |
|
402 | reset_token = UserModel().add_auth_token( | |
403 | user=user, lifetime_minutes=10, |
|
403 | user=user, lifetime_minutes=10, | |
404 | role=UserModel.auth_token_role.ROLE_PASSWORD_RESET, |
|
404 | role=UserModel.auth_token_role.ROLE_PASSWORD_RESET, | |
405 | description=description) |
|
405 | description=description) | |
406 | Session().commit() |
|
406 | Session().commit() | |
407 |
|
407 | |||
408 | log.debug('Successfully created password recovery token') |
|
408 | log.debug('Successfully created password recovery token') | |
409 | password_reset_url = self.request.route_url( |
|
409 | password_reset_url = self.request.route_url( | |
410 | 'reset_password_confirmation', |
|
410 | 'reset_password_confirmation', | |
411 | _query={'key': reset_token.api_key}) |
|
411 | _query={'key': reset_token.api_key}) | |
412 | UserModel().reset_password_link( |
|
412 | UserModel().reset_password_link( | |
413 | form_result, password_reset_url) |
|
413 | form_result, password_reset_url) | |
414 |
|
414 | |||
415 | action_data = {'email': user_email, |
|
415 | action_data = {'email': user_email, | |
416 | 'user_agent': self.request.user_agent} |
|
416 | 'user_agent': self.request.user_agent} | |
417 | audit_logger.store_web( |
|
417 | audit_logger.store_web( | |
418 | 'user.password.reset_request', action_data=action_data, |
|
418 | 'user.password.reset_request', action_data=action_data, | |
419 | user=self._rhodecode_user, commit=True) |
|
419 | user=self._rhodecode_user, commit=True) | |
420 |
|
420 | |||
421 | return default_response() |
|
421 | return default_response() | |
422 |
|
422 | |||
423 | except formencode.Invalid as errors: |
|
423 | except formencode.Invalid as errors: | |
424 | template_context.update({ |
|
424 | template_context.update({ | |
425 | 'defaults': errors.value, |
|
425 | 'defaults': errors.value, | |
426 | 'errors': errors.error_dict, |
|
426 | 'errors': errors.error_dict, | |
427 | }) |
|
427 | }) | |
428 | if not self.request.POST.get('email'): |
|
428 | if not self.request.POST.get('email'): | |
429 | # case of empty email, we want to report that |
|
429 | # case of empty email, we want to report that | |
430 | return self._get_template_context(c, **template_context) |
|
430 | return self._get_template_context(c, **template_context) | |
431 |
|
431 | |||
432 | if 'recaptcha_field' in errors.error_dict: |
|
432 | if 'recaptcha_field' in errors.error_dict: | |
433 | # case of failed captcha |
|
433 | # case of failed captcha | |
434 | return self._get_template_context(c, **template_context) |
|
434 | return self._get_template_context(c, **template_context) | |
435 |
|
435 | |||
436 | return default_response() |
|
436 | return default_response() | |
437 |
|
437 | |||
438 | return self._get_template_context(c, **template_context) |
|
438 | return self._get_template_context(c, **template_context) | |
439 |
|
439 | |||
440 | def password_reset_confirmation(self): |
|
440 | def password_reset_confirmation(self): | |
441 | self.load_default_context() |
|
441 | self.load_default_context() | |
442 | if self.request.GET and self.request.GET.get('key'): |
|
442 | if self.request.GET and self.request.GET.get('key'): | |
443 | # make this take 2s, to prevent brute forcing. |
|
443 | # make this take 2s, to prevent brute forcing. | |
444 | time.sleep(2) |
|
444 | time.sleep(2) | |
445 |
|
445 | |||
446 | token = AuthTokenModel().get_auth_token( |
|
446 | token = AuthTokenModel().get_auth_token( | |
447 | self.request.GET.get('key')) |
|
447 | self.request.GET.get('key')) | |
448 |
|
448 | |||
449 | # verify token is the correct role |
|
449 | # verify token is the correct role | |
450 | if token is None or token.role != UserApiKeys.ROLE_PASSWORD_RESET: |
|
450 | if token is None or token.role != UserApiKeys.ROLE_PASSWORD_RESET: | |
451 | log.debug('Got token with role:%s expected is %s', |
|
451 | log.debug('Got token with role:%s expected is %s', | |
452 | getattr(token, 'role', 'EMPTY_TOKEN'), |
|
452 | getattr(token, 'role', 'EMPTY_TOKEN'), | |
453 | UserApiKeys.ROLE_PASSWORD_RESET) |
|
453 | UserApiKeys.ROLE_PASSWORD_RESET) | |
454 | h.flash( |
|
454 | h.flash( | |
455 | _('Given reset token is invalid'), category='error') |
|
455 | _('Given reset token is invalid'), category='error') | |
456 | return HTTPFound(self.request.route_path('reset_password')) |
|
456 | return HTTPFound(self.request.route_path('reset_password')) | |
457 |
|
457 | |||
458 | try: |
|
458 | try: | |
459 | owner = token.user |
|
459 | owner = token.user | |
460 | data = {'email': owner.email, 'token': token.api_key} |
|
460 | data = {'email': owner.email, 'token': token.api_key} | |
461 | UserModel().reset_password(data) |
|
461 | UserModel().reset_password(data) | |
462 | h.flash( |
|
462 | h.flash( | |
463 | _('Your password reset was successful, ' |
|
463 | _('Your password reset was successful, ' | |
464 | 'a new password has been sent to your email'), |
|
464 | 'a new password has been sent to your email'), | |
465 | category='success') |
|
465 | category='success') | |
466 | except Exception as e: |
|
466 | except Exception as e: | |
467 | log.error(e) |
|
467 | log.error(e) | |
468 | return HTTPFound(self.request.route_path('reset_password')) |
|
468 | return HTTPFound(self.request.route_path('reset_password')) | |
469 |
|
469 | |||
470 | return HTTPFound(self.request.route_path('login')) |
|
470 | return HTTPFound(self.request.route_path('login')) |
@@ -1,258 +1,258 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import os |
|
21 | import os | |
22 | import re |
|
22 | import re | |
23 | import sys |
|
23 | import sys | |
24 | import logging |
|
24 | import logging | |
25 | import signal |
|
25 | import signal | |
26 | import tempfile |
|
26 | import tempfile | |
27 | from subprocess import Popen, PIPE |
|
27 | from subprocess import Popen, PIPE | |
28 | import urlparse |
|
28 | import urllib.parse | |
29 |
|
29 | |||
30 | from .base import VcsServer |
|
30 | from .base import VcsServer | |
31 |
|
31 | |||
32 | log = logging.getLogger(__name__) |
|
32 | log = logging.getLogger(__name__) | |
33 |
|
33 | |||
34 |
|
34 | |||
35 | class SubversionTunnelWrapper(object): |
|
35 | class SubversionTunnelWrapper(object): | |
36 | process = None |
|
36 | process = None | |
37 |
|
37 | |||
38 | def __init__(self, server): |
|
38 | def __init__(self, server): | |
39 | self.server = server |
|
39 | self.server = server | |
40 | self.timeout = 30 |
|
40 | self.timeout = 30 | |
41 | self.stdin = sys.stdin |
|
41 | self.stdin = sys.stdin | |
42 | self.stdout = sys.stdout |
|
42 | self.stdout = sys.stdout | |
43 | self.svn_conf_fd, self.svn_conf_path = tempfile.mkstemp() |
|
43 | self.svn_conf_fd, self.svn_conf_path = tempfile.mkstemp() | |
44 | self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp() |
|
44 | self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp() | |
45 |
|
45 | |||
46 | self.read_only = True # flag that we set to make the hooks readonly |
|
46 | self.read_only = True # flag that we set to make the hooks readonly | |
47 |
|
47 | |||
48 | def create_svn_config(self): |
|
48 | def create_svn_config(self): | |
49 | content = ( |
|
49 | content = ( | |
50 | '[general]\n' |
|
50 | '[general]\n' | |
51 | 'hooks-env = {}\n').format(self.hooks_env_path) |
|
51 | 'hooks-env = {}\n').format(self.hooks_env_path) | |
52 | with os.fdopen(self.svn_conf_fd, 'w') as config_file: |
|
52 | with os.fdopen(self.svn_conf_fd, 'w') as config_file: | |
53 | config_file.write(content) |
|
53 | config_file.write(content) | |
54 |
|
54 | |||
55 | def create_hooks_env(self): |
|
55 | def create_hooks_env(self): | |
56 | content = ( |
|
56 | content = ( | |
57 | '[default]\n' |
|
57 | '[default]\n' | |
58 | 'LANG = en_US.UTF-8\n') |
|
58 | 'LANG = en_US.UTF-8\n') | |
59 | if self.read_only: |
|
59 | if self.read_only: | |
60 | content += 'SSH_READ_ONLY = 1\n' |
|
60 | content += 'SSH_READ_ONLY = 1\n' | |
61 | with os.fdopen(self.hooks_env_fd, 'w') as hooks_env_file: |
|
61 | with os.fdopen(self.hooks_env_fd, 'w') as hooks_env_file: | |
62 | hooks_env_file.write(content) |
|
62 | hooks_env_file.write(content) | |
63 |
|
63 | |||
64 | def remove_configs(self): |
|
64 | def remove_configs(self): | |
65 | os.remove(self.svn_conf_path) |
|
65 | os.remove(self.svn_conf_path) | |
66 | os.remove(self.hooks_env_path) |
|
66 | os.remove(self.hooks_env_path) | |
67 |
|
67 | |||
68 | def command(self): |
|
68 | def command(self): | |
69 | root = self.server.get_root_store() |
|
69 | root = self.server.get_root_store() | |
70 | username = self.server.user.username |
|
70 | username = self.server.user.username | |
71 |
|
71 | |||
72 | command = [ |
|
72 | command = [ | |
73 | self.server.svn_path, '-t', |
|
73 | self.server.svn_path, '-t', | |
74 | '--config-file', self.svn_conf_path, |
|
74 | '--config-file', self.svn_conf_path, | |
75 | '--tunnel-user', username, |
|
75 | '--tunnel-user', username, | |
76 | '-r', root] |
|
76 | '-r', root] | |
77 | log.debug("Final CMD: %s", ' '.join(command)) |
|
77 | log.debug("Final CMD: %s", ' '.join(command)) | |
78 | return command |
|
78 | return command | |
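A rough sketch of what the assembled tunnel command looks like, assuming `ssh.executable.svn` points at an `svnserve` binary; the paths and username below are invented:

    # Invented values, mirroring the assembly in command() above
    svn_path = '/usr/bin/svnserve'      # assumed value of ssh.executable.svn
    svn_conf_path = '/tmp/tmpa1b2c3'    # tempfile.mkstemp() result
    username = 'alice'
    root = '/var/opt/repos'

    command = [svn_path, '-t',
               '--config-file', svn_conf_path,
               '--tunnel-user', username,
               '-r', root]
    print(' '.join(command))
    # -> /usr/bin/svnserve -t --config-file /tmp/tmpa1b2c3 --tunnel-user alice -r /var/opt/repos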
79 |
|
79 | |||
80 | def start(self): |
|
80 | def start(self): | |
81 | command = self.command() |
|
81 | command = self.command() | |
82 | self.process = Popen(' '.join(command), stdin=PIPE, shell=True) |
|
82 | self.process = Popen(' '.join(command), stdin=PIPE, shell=True) | |
83 |
|
83 | |||
84 | def sync(self): |
|
84 | def sync(self): | |
85 | while self.process.poll() is None: |
|
85 | while self.process.poll() is None: | |
86 | next_byte = self.stdin.read(1) |
|
86 | next_byte = self.stdin.read(1) | |
87 | if not next_byte: |
|
87 | if not next_byte: | |
88 | break |
|
88 | break | |
89 | self.process.stdin.write(next_byte) |
|
89 | self.process.stdin.write(next_byte) | |
90 | self.remove_configs() |
|
90 | self.remove_configs() | |
91 |
|
91 | |||
92 | @property |
|
92 | @property | |
93 | def return_code(self): |
|
93 | def return_code(self): | |
94 | return self.process.returncode |
|
94 | return self.process.returncode | |
95 |
|
95 | |||
96 | def get_first_client_response(self): |
|
96 | def get_first_client_response(self): | |
97 | signal.signal(signal.SIGALRM, self.interrupt) |
|
97 | signal.signal(signal.SIGALRM, self.interrupt) | |
98 | signal.alarm(self.timeout) |
|
98 | signal.alarm(self.timeout) | |
99 | first_response = self._read_first_client_response() |
|
99 | first_response = self._read_first_client_response() | |
100 | signal.alarm(0) |
|
100 | signal.alarm(0) | |
101 | return (self._parse_first_client_response(first_response) |
|
101 | return (self._parse_first_client_response(first_response) | |
102 | if first_response else None) |
|
102 | if first_response else None) | |
103 |
|
103 | |||
104 | def patch_first_client_response(self, response, **kwargs): |
|
104 | def patch_first_client_response(self, response, **kwargs): | |
105 | self.create_hooks_env() |
|
105 | self.create_hooks_env() | |
106 | data = response.copy() |
|
106 | data = response.copy() | |
107 | data.update(kwargs) |
|
107 | data.update(kwargs) | |
108 | data['url'] = self._svn_string(data['url']) |
|
108 | data['url'] = self._svn_string(data['url']) | |
109 | data['ra_client'] = self._svn_string(data['ra_client']) |
|
109 | data['ra_client'] = self._svn_string(data['ra_client']) | |
110 | data['client'] = data['client'] or '' |
|
110 | data['client'] = data['client'] or '' | |
111 | buffer_ = ( |
|
111 | buffer_ = ( | |
112 | "( {version} ( {capabilities} ) {url}{ra_client}" |
|
112 | "( {version} ( {capabilities} ) {url}{ra_client}" | |
113 | "( {client}) ) ".format(**data)) |
|
113 | "( {client}) ) ".format(**data)) | |
114 | self.process.stdin.write(buffer_) |
|
114 | self.process.stdin.write(buffer_) | |
115 |
|
115 | |||
116 | def fail(self, message): |
|
116 | def fail(self, message): | |
117 | print("( failure ( ( 210005 {message} 0: 0 ) ) )".format( |
|
117 | print("( failure ( ( 210005 {message} 0: 0 ) ) )".format( | |
118 | message=self._svn_string(message))) |
|
118 | message=self._svn_string(message))) | |
119 | self.remove_configs() |
|
119 | self.remove_configs() | |
120 | self.process.kill() |
|
120 | self.process.kill() | |
121 | return 1 |
|
121 | return 1 | |
122 |
|
122 | |||
123 | def interrupt(self, signum, frame): |
|
123 | def interrupt(self, signum, frame): | |
124 | self.fail("Exited by timeout") |
|
124 | self.fail("Exited by timeout") | |
125 |
|
125 | |||
126 | def _svn_string(self, str_): |
|
126 | def _svn_string(self, str_): | |
127 | if not str_: |
|
127 | if not str_: | |
128 | return '' |
|
128 | return '' | |
129 | return '{length}:{string} '.format(length=len(str_), string=str_) |
|
129 | return '{length}:{string} '.format(length=len(str_), string=str_) | |
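`_svn_string` produces the length-prefixed string form used on the svn wire protocol; a quick worked example of the same formatting, shown standalone:

    # Same formatting as _svn_string above
    def svn_string(str_):
        if not str_:
            return ''
        return '{length}:{string} '.format(length=len(str_), string=str_)

    print(repr(svn_string('svn+ssh://host/repo')))  # -> '19:svn+ssh://host/repo '
    print(repr(svn_string('')))                     # -> ''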
130 |
|
130 | |||
131 | def _read_first_client_response(self): |
|
131 | def _read_first_client_response(self): | |
132 | buffer_ = "" |
|
132 | buffer_ = "" | |
133 | brackets_stack = [] |
|
133 | brackets_stack = [] | |
134 | while True: |
|
134 | while True: | |
135 | next_byte = self.stdin.read(1) |
|
135 | next_byte = self.stdin.read(1) | |
136 | buffer_ += next_byte |
|
136 | buffer_ += next_byte | |
137 | if next_byte == "(": |
|
137 | if next_byte == "(": | |
138 | brackets_stack.append(next_byte) |
|
138 | brackets_stack.append(next_byte) | |
139 | elif next_byte == ")": |
|
139 | elif next_byte == ")": | |
140 | brackets_stack.pop() |
|
140 | brackets_stack.pop() | |
141 | elif next_byte == " " and not brackets_stack: |
|
141 | elif next_byte == " " and not brackets_stack: | |
142 | break |
|
142 | break | |
143 |
|
143 | |||
144 | return buffer_ |
|
144 | return buffer_ | |
145 |
|
145 | |||
146 | def _parse_first_client_response(self, buffer_): |
|
146 | def _parse_first_client_response(self, buffer_): | |
147 | """ |
|
147 | """ | |
148 | According to the Subversion RA protocol, the first request |
|
148 | According to the Subversion RA protocol, the first request | |
149 | should look like: |
|
149 | should look like: | |
150 |
|
150 | |||
151 | ( version:number ( cap:word ... ) url:string ? ra-client:string |
|
151 | ( version:number ( cap:word ... ) url:string ? ra-client:string | |
152 | ( ? client:string ) ) |
|
152 | ( ? client:string ) ) | |
153 |
|
153 | |||
154 | Please check https://svn.apache.org/repos/asf/subversion/trunk/subversion/libsvn_ra_svn/protocol |
|
154 | Please check https://svn.apache.org/repos/asf/subversion/trunk/subversion/libsvn_ra_svn/protocol | |
155 | """ |
|
155 | """ | |
156 | version_re = r'(?P<version>\d+)' |
|
156 | version_re = r'(?P<version>\d+)' | |
157 | capabilities_re = r'\(\s(?P<capabilities>[\w\d\-\ ]+)\s\)' |
|
157 | capabilities_re = r'\(\s(?P<capabilities>[\w\d\-\ ]+)\s\)' | |
158 | url_re = r'\d+\:(?P<url>[\W\w]+)' |
|
158 | url_re = r'\d+\:(?P<url>[\W\w]+)' | |
159 | ra_client_re = r'(\d+\:(?P<ra_client>[\W\w]+)\s)' |
|
159 | ra_client_re = r'(\d+\:(?P<ra_client>[\W\w]+)\s)' | |
160 | client_re = r'(\d+\:(?P<client>[\W\w]+)\s)*' |
|
160 | client_re = r'(\d+\:(?P<client>[\W\w]+)\s)*' | |
161 | regex = re.compile( |
|
161 | regex = re.compile( | |
162 | r'^\(\s{version}\s{capabilities}\s{url}\s{ra_client}' |
|
162 | r'^\(\s{version}\s{capabilities}\s{url}\s{ra_client}' | |
163 | r'\(\s{client}\)\s\)\s*$'.format( |
|
163 | r'\(\s{client}\)\s\)\s*$'.format( | |
164 | version=version_re, capabilities=capabilities_re, |
|
164 | version=version_re, capabilities=capabilities_re, | |
165 | url=url_re, ra_client=ra_client_re, client=client_re)) |
|
165 | url=url_re, ra_client=ra_client_re, client=client_re)) | |
166 | matcher = regex.match(buffer_) |
|
166 | matcher = regex.match(buffer_) | |
167 |
|
167 | |||
168 | return matcher.groupdict() if matcher else None |
|
168 | return matcher.groupdict() if matcher else None | |
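To make the regex above concrete, here is the same pattern applied to an invented client greeting (host, repository and client strings are made up); the pattern is duplicated so the sketch runs standalone:

    import re

    version_re = r'(?P<version>\d+)'
    capabilities_re = r'\(\s(?P<capabilities>[\w\d\-\ ]+)\s\)'
    url_re = r'\d+\:(?P<url>[\W\w]+)'
    ra_client_re = r'(\d+\:(?P<ra_client>[\W\w]+)\s)'
    client_re = r'(\d+\:(?P<client>[\W\w]+)\s)*'
    regex = re.compile(
        r'^\(\s{version}\s{capabilities}\s{url}\s{ra_client}'
        r'\(\s{client}\)\s\)\s*$'.format(
            version=version_re, capabilities=capabilities_re,
            url=url_re, ra_client=ra_client_re, client=client_re))

    # invented greeting; '26:' and '10:' are the length prefixes of the strings
    greeting = '( 2 ( edit-pipeline svndiff1 ) 26:svn+ssh://host/my-svn-repo 10:SVN/1.14.0 ( ) ) '
    print(regex.match(greeting).groupdict())
    # -> {'version': '2', 'capabilities': 'edit-pipeline svndiff1',
    #     'url': 'svn+ssh://host/my-svn-repo', 'ra_client': 'SVN/1.14.0', 'client': None}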
169 |
|
169 | |||
170 | def _match_repo_name(self, url): |
|
170 | def _match_repo_name(self, url): | |
171 | """ |
|
171 | """ | |
172 | Given a server url, try to match it against ALL known repository names. 

172 | Given a server url, try to match it against ALL known repository names. | 
173 | This handles a tricky SVN case for SSH and subdir commits. |
|
173 | This handles a tricky SVN case for SSH and subdir commits. | |
174 | E.g. if our repo name is my-svn-repo, an svn commit on a file in a subdir would 

174 | E.g. if our repo name is my-svn-repo, an svn commit on a file in a subdir would | 
175 | result in the url with this subdir added. |
|
175 | result in the url with this subdir added. | |
176 | """ |
|
176 | """ | |
177 | # case 1 direct match, we don't do any "heavy" lookups |
|
177 | # case 1 direct match, we don't do any "heavy" lookups | |
178 | if url in self.server.user_permissions: |
|
178 | if url in self.server.user_permissions: | |
179 | return url |
|
179 | return url | |
180 |
|
180 | |||
181 | log.debug('Extracting repository name from subdir path %s', url) |
|
181 | log.debug('Extracting repository name from subdir path %s', url) | |
182 | # case 2: we check all permissions, and match the closest possible case... 

182 | # case 2: we check all permissions, and match the closest possible case... | 
183 | # NOTE(dan): In this case we only know that the url has subdir parts, it's safe 

183 | # NOTE(dan): In this case we only know that the url has subdir parts, it's safe | 
184 | # to assume that it will have the repo name as prefix, we ensure the prefix |
|
184 | # to assume that it will have the repo name as prefix, we ensure the prefix | |
185 | # for similar repositories isn't matched by adding a / |
|
185 | # for similar repositories isn't matched by adding a / | |
186 | # e.g. subgroup/repo-name/ and subgroup/repo-name-1/ would work correctly. 

186 | # e.g. subgroup/repo-name/ and subgroup/repo-name-1/ would work correctly. | 
187 | for repo_name in self.server.user_permissions: |
|
187 | for repo_name in self.server.user_permissions: | |
188 | repo_name_prefix = repo_name + '/' |
|
188 | repo_name_prefix = repo_name + '/' | |
189 | if url.startswith(repo_name_prefix): |
|
189 | if url.startswith(repo_name_prefix): | |
190 | log.debug('Found prefix %s match, returning proper repository name', |
|
190 | log.debug('Found prefix %s match, returning proper repository name', | |
191 | repo_name_prefix) |
|
191 | repo_name_prefix) | |
192 | return repo_name |
|
192 | return repo_name | |
193 |
|
193 | |||
194 | return |
|
194 | return | |
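A worked example of the prefix matching above, with an invented permission map and a commit url pointing into a subdirectory:

    # Invented permission map and subdir commit url
    user_permissions = {'subgroup/my-svn-repo': 'repository.write',
                        'subgroup/my-svn-repo-1': 'repository.read'}
    url = 'subgroup/my-svn-repo/trunk/docs'

    match = None
    for repo_name in user_permissions:
        # trailing '/' keeps similarly named repos from matching each other
        if url.startswith(repo_name + '/'):
            match = repo_name
            break
    print(match)  # -> subgroup/my-svn-repo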
195 |
|
195 | |||
196 | def run(self, extras): |
|
196 | def run(self, extras): | |
197 | action = 'pull' |
|
197 | action = 'pull' | |
198 | self.create_svn_config() |
|
198 | self.create_svn_config() | |
199 | self.start() |
|
199 | self.start() | |
200 |
|
200 | |||
201 | first_response = self.get_first_client_response() |
|
201 | first_response = self.get_first_client_response() | |
202 | if not first_response: |
|
202 | if not first_response: | |
203 | return self.fail("Repository name cannot be extracted") |
|
203 | return self.fail("Repository name cannot be extracted") | |
204 |
|
204 | |||
205 | url_parts = urlparse.urlparse(first_response['url']) |
|
205 | url_parts = urllib.parse.urlparse(first_response['url']) | 
206 |
|
206 | |||
207 | self.server.repo_name = self._match_repo_name(url_parts.path.strip('/')) |
|
207 | self.server.repo_name = self._match_repo_name(url_parts.path.strip('/')) | |
208 |
|
208 | |||
209 | exit_code = self.server._check_permissions(action) |
|
209 | exit_code = self.server._check_permissions(action) | |
210 | if exit_code: |
|
210 | if exit_code: | |
211 | return exit_code |
|
211 | return exit_code | |
212 |
|
212 | |||
213 | # set the readonly flag to False if we have proper permissions |
|
213 | # set the readonly flag to False if we have proper permissions | |
214 | if self.server.has_write_perm(): |
|
214 | if self.server.has_write_perm(): | |
215 | self.read_only = False |
|
215 | self.read_only = False | |
216 | self.server.update_environment(action=action, extras=extras) |
|
216 | self.server.update_environment(action=action, extras=extras) | |
217 |
|
217 | |||
218 | self.patch_first_client_response(first_response) |
|
218 | self.patch_first_client_response(first_response) | |
219 | self.sync() |
|
219 | self.sync() | |
220 | return self.return_code |
|
220 | return self.return_code | |
221 |
|
221 | |||
222 |
|
222 | |||
223 | class SubversionServer(VcsServer): |
|
223 | class SubversionServer(VcsServer): | |
224 | backend = 'svn' |
|
224 | backend = 'svn' | |
225 | repo_user_agent = 'svn' |
|
225 | repo_user_agent = 'svn' | |
226 |
|
226 | |||
227 | def __init__(self, store, ini_path, repo_name, |
|
227 | def __init__(self, store, ini_path, repo_name, | |
228 | user, user_permissions, config, env): |
|
228 | user, user_permissions, config, env): | |
229 | super(SubversionServer, self)\ |
|
229 | super(SubversionServer, self)\ | |
230 | .__init__(user, user_permissions, config, env) |
|
230 | .__init__(user, user_permissions, config, env) | |
231 | self.store = store |
|
231 | self.store = store | |
232 | self.ini_path = ini_path |
|
232 | self.ini_path = ini_path | |
233 | # NOTE(dan): repo_name at this point is empty, |
|
233 | # NOTE(dan): repo_name at this point is empty, | |
234 | # this is set later in .run() based on the parsed input stream 

234 | # this is set later in .run() based on the parsed input stream | 
235 | self.repo_name = repo_name |
|
235 | self.repo_name = repo_name | |
236 | self._path = self.svn_path = config.get('app:main', 'ssh.executable.svn') |
|
236 | self._path = self.svn_path = config.get('app:main', 'ssh.executable.svn') | |
237 |
|
237 | |||
238 | self.tunnel = SubversionTunnelWrapper(server=self) |
|
238 | self.tunnel = SubversionTunnelWrapper(server=self) | |
239 |
|
239 | |||
240 | def _handle_tunnel(self, extras): |
|
240 | def _handle_tunnel(self, extras): | |
241 |
|
241 | |||
242 | # pre-auth |
|
242 | # pre-auth | |
243 | action = 'pull' |
|
243 | action = 'pull' | |
244 | # Special case for SVN, we extract repo name at later stage |
|
244 | # Special case for SVN, we extract repo name at later stage | |
245 | # exit_code = self._check_permissions(action) |
|
245 | # exit_code = self._check_permissions(action) | |
246 | # if exit_code: |
|
246 | # if exit_code: | |
247 | # return exit_code, False |
|
247 | # return exit_code, False | |
248 |
|
248 | |||
249 | req = self.env['request'] |
|
249 | req = self.env['request'] | |
250 | server_url = req.host_url + req.script_name |
|
250 | server_url = req.host_url + req.script_name | |
251 | extras['server_url'] = server_url |
|
251 | extras['server_url'] = server_url | |
252 |
|
252 | |||
253 | log.debug('Using %s binaries from path %s', self.backend, self._path) |
|
253 | log.debug('Using %s binaries from path %s', self.backend, self._path) | |
254 | exit_code = self.tunnel.run(extras) |
|
254 | exit_code = self.tunnel.run(extras) | |
255 |
|
255 | |||
256 | return exit_code, action == "push" |
|
256 | return exit_code, action == "push" | |
257 |
|
257 | |||
258 |
|
258 |
@@ -1,580 +1,580 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | """ |
|
22 | """ | |
23 | Renderer for markup languages with ability to parse using rst or markdown |
|
23 | Renderer for markup languages with ability to parse using rst or markdown | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 | import re |
|
26 | import re | |
27 | import os |
|
27 | import os | |
28 | import lxml |
|
28 | import lxml | |
29 | import logging |
|
29 | import logging | |
30 | import urlparse |
|
30 | import urllib.parse | |
31 | import bleach |
|
31 | import bleach | |
32 |
|
32 | |||
33 | from mako.lookup import TemplateLookup |
|
33 | from mako.lookup import TemplateLookup | |
34 | from mako.template import Template as MakoTemplate |
|
34 | from mako.template import Template as MakoTemplate | |
35 |
|
35 | |||
36 | from docutils.core import publish_parts |
|
36 | from docutils.core import publish_parts | |
37 | from docutils.parsers.rst import directives |
|
37 | from docutils.parsers.rst import directives | |
38 | from docutils import writers |
|
38 | from docutils import writers | |
39 | from docutils.writers import html4css1 |
|
39 | from docutils.writers import html4css1 | |
40 | import markdown |
|
40 | import markdown | |
41 |
|
41 | |||
42 | from rhodecode.lib.markdown_ext import GithubFlavoredMarkdownExtension |
|
42 | from rhodecode.lib.markdown_ext import GithubFlavoredMarkdownExtension | |
43 | from rhodecode.lib.utils2 import (safe_unicode, md5_safe, MENTIONS_REGEX) |
|
43 | from rhodecode.lib.utils2 import (safe_unicode, md5_safe, MENTIONS_REGEX) | |
44 |
|
44 | |||
45 | log = logging.getLogger(__name__) |
|
45 | log = logging.getLogger(__name__) | |
46 |
|
46 | |||
47 | # default renderer used to generate automated comments |
|
47 | # default renderer used to generate automated comments | |
48 | DEFAULT_COMMENTS_RENDERER = 'rst' |
|
48 | DEFAULT_COMMENTS_RENDERER = 'rst' | |
49 |
|
49 | |||
50 | try: |
|
50 | try: | |
51 | from lxml.html import fromstring |
|
51 | from lxml.html import fromstring | |
52 | from lxml.html import tostring |
|
52 | from lxml.html import tostring | |
53 | except ImportError: |
|
53 | except ImportError: | |
54 | log.exception('Failed to import lxml') |
|
54 | log.exception('Failed to import lxml') | |
55 | fromstring = None |
|
55 | fromstring = None | |
56 | tostring = None |
|
56 | tostring = None | |
57 |
|
57 | |||
58 |
|
58 | |||
59 | class CustomHTMLTranslator(writers.html4css1.HTMLTranslator): |
|
59 | class CustomHTMLTranslator(writers.html4css1.HTMLTranslator): | |
60 | """ |
|
60 | """ | |
61 | Custom HTML Translator used for sandboxing potential |
|
61 | Custom HTML Translator used for sandboxing potential | |
62 | JS injections in ref links |
|
62 | JS injections in ref links | |
63 | """ |
|
63 | """ | |
64 | def visit_literal_block(self, node): |
|
64 | def visit_literal_block(self, node): | |
65 | self.body.append(self.starttag(node, 'pre', CLASS='codehilite literal-block')) |
|
65 | self.body.append(self.starttag(node, 'pre', CLASS='codehilite literal-block')) | |
66 |
|
66 | |||
67 | def visit_reference(self, node): |
|
67 | def visit_reference(self, node): | |
68 | if 'refuri' in node.attributes: |
|
68 | if 'refuri' in node.attributes: | |
69 | refuri = node['refuri'] |
|
69 | refuri = node['refuri'] | |
70 | if ':' in refuri: |
|
70 | if ':' in refuri: | |
71 | prefix, link = refuri.lstrip().split(':', 1) |
|
71 | prefix, link = refuri.lstrip().split(':', 1) | |
72 | prefix = prefix or '' |
|
72 | prefix = prefix or '' | |
73 |
|
73 | |||
74 | if prefix.lower() == 'javascript': |
|
74 | if prefix.lower() == 'javascript': | |
75 | # we don't allow javascript type of refs... |
|
75 | # we don't allow javascript type of refs... | |
76 | node['refuri'] = 'javascript:alert("SandBoxedJavascript")' |
|
76 | node['refuri'] = 'javascript:alert("SandBoxedJavascript")' | |
77 |
|
77 | |||
78 | # old style class requires this... |
|
78 | # old style class requires this... | |
79 | return html4css1.HTMLTranslator.visit_reference(self, node) |
|
79 | return html4css1.HTMLTranslator.visit_reference(self, node) | |
80 |
|
80 | |||
81 |
|
81 | |||
82 | class RhodeCodeWriter(writers.html4css1.Writer): |
|
82 | class RhodeCodeWriter(writers.html4css1.Writer): | |
83 | def __init__(self): |
|
83 | def __init__(self): | |
84 | writers.Writer.__init__(self) |
|
84 | writers.Writer.__init__(self) | |
85 | self.translator_class = CustomHTMLTranslator |
|
85 | self.translator_class = CustomHTMLTranslator | |
86 |
|
86 | |||
87 |
|
87 | |||
88 | def relative_links(html_source, server_paths): |
|
88 | def relative_links(html_source, server_paths): | |
89 | if not html_source: |
|
89 | if not html_source: | |
90 | return html_source |
|
90 | return html_source | |
91 |
|
91 | |||
92 | if not fromstring or not tostring: 

92 | if not fromstring or not tostring: | 
93 | return html_source |
|
93 | return html_source | |
94 |
|
94 | |||
95 | try: |
|
95 | try: | |
96 | doc = lxml.html.fromstring(html_source) |
|
96 | doc = lxml.html.fromstring(html_source) | |
97 | except Exception: |
|
97 | except Exception: | |
98 | return html_source |
|
98 | return html_source | |
99 |
|
99 | |||
100 | for el in doc.cssselect('img, video'): |
|
100 | for el in doc.cssselect('img, video'): | |
101 | src = el.attrib.get('src') |
|
101 | src = el.attrib.get('src') | |
102 | if src: |
|
102 | if src: | |
103 | el.attrib['src'] = relative_path(src, server_paths['raw']) |
|
103 | el.attrib['src'] = relative_path(src, server_paths['raw']) | |
104 |
|
104 | |||
105 | for el in doc.cssselect('a:not(.gfm)'): |
|
105 | for el in doc.cssselect('a:not(.gfm)'): | |
106 | src = el.attrib.get('href') |
|
106 | src = el.attrib.get('href') | |
107 | if src: |
|
107 | if src: | |
108 | raw_mode = el.attrib['href'].endswith('?raw=1') |
|
108 | raw_mode = el.attrib['href'].endswith('?raw=1') | |
109 | if raw_mode: |
|
109 | if raw_mode: | |
110 | el.attrib['href'] = relative_path(src, server_paths['raw']) |
|
110 | el.attrib['href'] = relative_path(src, server_paths['raw']) | |
111 | else: |
|
111 | else: | |
112 | el.attrib['href'] = relative_path(src, server_paths['standard']) |
|
112 | el.attrib['href'] = relative_path(src, server_paths['standard']) | |
113 |
|
113 | |||
114 | return lxml.html.tostring(doc) |
|
114 | return lxml.html.tostring(doc) | |
115 |
|
115 | |||
116 |
|
116 | |||
117 | def relative_path(path, request_path, is_repo_file=None): |
|
117 | def relative_path(path, request_path, is_repo_file=None): | |
118 | """ |
|
118 | """ | |
119 | relative link support, path is a rel path, and request_path is the current 

119 | relative link support, path is a rel path, and request_path is the current | 
120 | server path (not absolute) |
|
120 | server path (not absolute) | |
121 |
|
121 | |||
122 | e.g. |
|
122 | e.g. | |
123 |
|
123 | |||
124 | path = '../logo.png' |
|
124 | path = '../logo.png' | |
125 | request_path= '/repo/files/path/file.md' |
|
125 | request_path= '/repo/files/path/file.md' | |
126 | produces: '/repo/files/logo.png' |
|
126 | produces: '/repo/files/logo.png' | |
127 | """ |
|
127 | """ | |
128 | # TODO(marcink): unicode/str support ? |
|
128 | # TODO(marcink): unicode/str support ? | |
129 | # maybe=> safe_unicode(urllib.quote(safe_str(final_path), '/:')) |
|
129 | # maybe=> safe_unicode(urllib.quote(safe_str(final_path), '/:')) | |
130 |
|
130 | |||
131 | def dummy_check(p): |
|
131 | def dummy_check(p): | |
132 | return True # assume default is a valid file path |
|
132 | return True # assume default is a valid file path | |
133 |
|
133 | |||
134 | is_repo_file = is_repo_file or dummy_check |
|
134 | is_repo_file = is_repo_file or dummy_check | |
135 | if not path: |
|
135 | if not path: | |
136 | return request_path |
|
136 | return request_path | |
137 |
|
137 | |||
138 | path = safe_unicode(path) |
|
138 | path = safe_unicode(path) | |
139 | request_path = safe_unicode(request_path) |
|
139 | request_path = safe_unicode(request_path) | |
140 |
|
140 | |||
141 | if path.startswith((u'data:', u'javascript:', u'#', u':')): |
|
141 | if path.startswith((u'data:', u'javascript:', u'#', u':')): | |
142 | # skip data, anchor, invalid links |
|
142 | # skip data, anchor, invalid links | |
143 | return path |
|
143 | return path | |
144 |
|
144 | |||
145 | is_absolute = bool(urlparse.urlparse(path).netloc) |
|
145 | is_absolute = bool(urllib.parse.urlparse(path).netloc) | 
146 | if is_absolute: |
|
146 | if is_absolute: | |
147 | return path |
|
147 | return path | |
148 |
|
148 | |||
149 | if not request_path: |
|
149 | if not request_path: | |
150 | return path |
|
150 | return path | |
151 |
|
151 | |||
152 | if path.startswith(u'/'): |
|
152 | if path.startswith(u'/'): | |
153 | path = path[1:] |
|
153 | path = path[1:] | |
154 |
|
154 | |||
155 | if path.startswith(u'./'): |
|
155 | if path.startswith(u'./'): | |
156 | path = path[2:] |
|
156 | path = path[2:] | |
157 |
|
157 | |||
158 | parts = request_path.split('/') |
|
158 | parts = request_path.split('/') | |
159 | # compute how deep we need to traverse the request_path |
|
159 | # compute how deep we need to traverse the request_path | |
160 | depth = 0 |
|
160 | depth = 0 | |
161 |
|
161 | |||
162 | if is_repo_file(request_path): |
|
162 | if is_repo_file(request_path): | |
163 | # if request path is a VALID file, we use a relative path with |
|
163 | # if request path is a VALID file, we use a relative path with | |
164 | # one level up |
|
164 | # one level up | |
165 | depth += 1 |
|
165 | depth += 1 | |
166 |
|
166 | |||
167 | while path.startswith(u'../'): |
|
167 | while path.startswith(u'../'): | |
168 | depth += 1 |
|
168 | depth += 1 | |
169 | path = path[3:] |
|
169 | path = path[3:] | |
170 |
|
170 | |||
171 | if depth > 0: |
|
171 | if depth > 0: | |
172 | parts = parts[:-depth] |
|
172 | parts = parts[:-depth] | |
173 |
|
173 | |||
174 | parts.append(path) |
|
174 | parts.append(path) | |
175 | final_path = u'/'.join(parts).lstrip(u'/') |
|
175 | final_path = u'/'.join(parts).lstrip(u'/') | |
176 |
|
176 | |||
177 | return u'/' + final_path |
|
177 | return u'/' + final_path | |
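For illustration, a minimal usage sketch of relative_path; the import path is an assumption (adjust to wherever this module actually lives), and the expected results follow the docstring example and the branches above:

# usage sketch: the module path below is an assumption
from rhodecode.lib.markup_renderer import relative_path

# parent-relative link inside a rendered repo file (matches the docstring example)
relative_path(u'../logo.png', u'/repo/files/path/file.md')
# -> u'/repo/files/logo.png'

# absolute URLs and anchors are returned untouched
relative_path(u'https://example.com/logo.png', u'/repo/files/file.md')
# -> u'https://example.com/logo.png'
relative_path(u'#section', u'/repo/files/file.md')
# -> u'#section'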
178 |
|
178 | |||
179 |
|
179 | |||
180 | _cached_markdown_renderer = None |
|
180 | _cached_markdown_renderer = None | |
181 |
|
181 | |||
182 |
|
182 | |||
183 | def get_markdown_renderer(extensions, output_format): |
|
183 | def get_markdown_renderer(extensions, output_format): | |
184 | global _cached_markdown_renderer |
|
184 | global _cached_markdown_renderer | |
185 |
|
185 | |||
186 | if _cached_markdown_renderer is None: |
|
186 | if _cached_markdown_renderer is None: | |
187 | _cached_markdown_renderer = markdown.Markdown( |
|
187 | _cached_markdown_renderer = markdown.Markdown( | |
188 | extensions=extensions, |
|
188 | extensions=extensions, | |
189 | enable_attributes=False, output_format=output_format) |
|
189 | enable_attributes=False, output_format=output_format) | |
190 | return _cached_markdown_renderer |
|
190 | return _cached_markdown_renderer | |
191 |
|
191 | |||
192 |
|
192 | |||
193 | _cached_markdown_renderer_flavored = None |
|
193 | _cached_markdown_renderer_flavored = None | |
194 |
|
194 | |||
195 |
|
195 | |||
196 | def get_markdown_renderer_flavored(extensions, output_format): |
|
196 | def get_markdown_renderer_flavored(extensions, output_format): | |
197 | global _cached_markdown_renderer_flavored |
|
197 | global _cached_markdown_renderer_flavored | |
198 |
|
198 | |||
199 | if _cached_markdown_renderer_flavored is None: |
|
199 | if _cached_markdown_renderer_flavored is None: | |
200 | _cached_markdown_renderer_flavored = markdown.Markdown( |
|
200 | _cached_markdown_renderer_flavored = markdown.Markdown( | |
201 | extensions=extensions + [GithubFlavoredMarkdownExtension()], |
|
201 | extensions=extensions + [GithubFlavoredMarkdownExtension()], | |
202 | enable_attributes=False, output_format=output_format) |
|
202 | enable_attributes=False, output_format=output_format) | |
203 | return _cached_markdown_renderer_flavored |
|
203 | return _cached_markdown_renderer_flavored | |
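A brief, hedged usage sketch of the two cached renderer helpers above; the attribute names come from the MarkupRenderer class defined below, and convert()/reset() are standard python-markdown API:

# usage sketch of the module-level cached renderers defined above
md = get_markdown_renderer(MarkupRenderer.extensions, MarkupRenderer.output_format)
html = md.convert('# Title\n\nSome *markdown* text')
# markdown.Markdown instances carry state between calls, so a shared
# cached instance is typically reset() before being reused
md.reset()

gfm = get_markdown_renderer_flavored(MarkupRenderer.extensions, MarkupRenderer.output_format)
html_gfm = gfm.convert('GitHub-flavored *markdown* text')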
204 |
|
204 | |||
205 |
|
205 | |||
206 | class MarkupRenderer(object): |
|
206 | class MarkupRenderer(object): | |
207 | RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES = ['include', 'meta', 'raw'] |
|
207 | RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES = ['include', 'meta', 'raw'] | |
208 |
|
208 | |||
209 | MARKDOWN_PAT = re.compile(r'\.(md|mkdn?|mdown|markdown)$', re.IGNORECASE) |
|
209 | MARKDOWN_PAT = re.compile(r'\.(md|mkdn?|mdown|markdown)$', re.IGNORECASE) | |
210 | RST_PAT = re.compile(r'\.re?st$', re.IGNORECASE) |
|
210 | RST_PAT = re.compile(r'\.re?st$', re.IGNORECASE) | |
211 | JUPYTER_PAT = re.compile(r'\.(ipynb)$', re.IGNORECASE) |
|
211 | JUPYTER_PAT = re.compile(r'\.(ipynb)$', re.IGNORECASE) | |
212 | PLAIN_PAT = re.compile(r'^readme$', re.IGNORECASE) |
|
212 | PLAIN_PAT = re.compile(r'^readme$', re.IGNORECASE) | |
213 |
|
213 | |||
214 | URL_PAT = re.compile(r'(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]' |
|
214 | URL_PAT = re.compile(r'(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]' | |
215 | r'|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)') |
|
215 | r'|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)') | |
216 |
|
216 | |||
217 | MENTION_PAT = re.compile(MENTIONS_REGEX) |
|
217 | MENTION_PAT = re.compile(MENTIONS_REGEX) | |
218 |
|
218 | |||
219 | extensions = ['markdown.extensions.codehilite', 'markdown.extensions.extra', |
|
219 | extensions = ['markdown.extensions.codehilite', 'markdown.extensions.extra', | |
220 | 'markdown.extensions.def_list', 'markdown.extensions.sane_lists'] |
|
220 | 'markdown.extensions.def_list', 'markdown.extensions.sane_lists'] | |
221 |
|
221 | |||
222 | output_format = 'html4' |
|
222 | output_format = 'html4' | |
223 |
|
223 | |||
224 | # extensions together with weights; a lower weight sorts first, so we control the |
|
224 | # extensions together with weights; a lower weight sorts first, so we control the | |
225 | # order in which extensions are tried for readme names. |
|
225 | # order in which extensions are tried for readme names. | |
226 | PLAIN_EXTS = [ |
|
226 | PLAIN_EXTS = [ | |
227 | # prefer no extension |
|
227 | # prefer no extension | |
228 | ('', 0), # special case that renders READMES names without extension |
|
228 | ('', 0), # special case that renders READMES names without extension | |
229 | ('.text', 2), ('.TEXT', 2), |
|
229 | ('.text', 2), ('.TEXT', 2), | |
230 | ('.txt', 3), ('.TXT', 3) |
|
230 | ('.txt', 3), ('.TXT', 3) | |
231 | ] |
|
231 | ] | |
232 |
|
232 | |||
233 | RST_EXTS = [ |
|
233 | RST_EXTS = [ | |
234 | ('.rst', 1), ('.rest', 1), |
|
234 | ('.rst', 1), ('.rest', 1), | |
235 | ('.RST', 2), ('.REST', 2) |
|
235 | ('.RST', 2), ('.REST', 2) | |
236 | ] |
|
236 | ] | |
237 |
|
237 | |||
238 | MARKDOWN_EXTS = [ |
|
238 | MARKDOWN_EXTS = [ | |
239 | ('.md', 1), ('.MD', 1), |
|
239 | ('.md', 1), ('.MD', 1), | |
240 | ('.mkdn', 2), ('.MKDN', 2), |
|
240 | ('.mkdn', 2), ('.MKDN', 2), | |
241 | ('.mdown', 3), ('.MDOWN', 3), |
|
241 | ('.mdown', 3), ('.MDOWN', 3), | |
242 | ('.markdown', 4), ('.MARKDOWN', 4) |
|
242 | ('.markdown', 4), ('.MARKDOWN', 4) | |
243 | ] |
|
243 | ] | |
244 |
|
244 | |||
245 | def _detect_renderer(self, source, filename=None): |
|
245 | def _detect_renderer(self, source, filename=None): | |
246 | """ |
|
246 | """ | |
247 | runs detection of what renderer should be used for generating html |
|
247 | runs detection of what renderer should be used for generating html | |
248 | from a markup language |
|
248 | from a markup language | |
249 |
|
249 | |||
250 | filename can also be an explicit renderer name |
|
250 | filename can also be an explicit renderer name | |
251 |
|
251 | |||
252 | :param source: |
|
252 | :param source: | |
253 | :param filename: |
|
253 | :param filename: | |
254 | """ |
|
254 | """ | |
255 |
|
255 | |||
256 | if MarkupRenderer.MARKDOWN_PAT.findall(filename): |
|
256 | if MarkupRenderer.MARKDOWN_PAT.findall(filename): | |
257 | detected_renderer = 'markdown' |
|
257 | detected_renderer = 'markdown' | |
258 | elif MarkupRenderer.RST_PAT.findall(filename): |
|
258 | elif MarkupRenderer.RST_PAT.findall(filename): | |
259 | detected_renderer = 'rst' |
|
259 | detected_renderer = 'rst' | |
260 | elif MarkupRenderer.JUPYTER_PAT.findall(filename): |
|
260 | elif MarkupRenderer.JUPYTER_PAT.findall(filename): | |
261 | detected_renderer = 'jupyter' |
|
261 | detected_renderer = 'jupyter' | |
262 | elif MarkupRenderer.PLAIN_PAT.findall(filename): |
|
262 | elif MarkupRenderer.PLAIN_PAT.findall(filename): | |
263 | detected_renderer = 'plain' |
|
263 | detected_renderer = 'plain' | |
264 | else: |
|
264 | else: | |
265 | detected_renderer = 'plain' |
|
265 | detected_renderer = 'plain' | |
266 |
|
266 | |||
267 | return getattr(MarkupRenderer, detected_renderer) |
|
267 | return getattr(MarkupRenderer, detected_renderer) | |
268 |
|
268 | |||
269 | @classmethod |
|
269 | @classmethod | |
270 | def bleach_clean(cls, text): |
|
270 | def bleach_clean(cls, text): | |
271 | from .bleach_whitelist import markdown_attrs, markdown_tags |
|
271 | from .bleach_whitelist import markdown_attrs, markdown_tags | |
272 | allowed_tags = markdown_tags |
|
272 | allowed_tags = markdown_tags | |
273 | allowed_attrs = markdown_attrs |
|
273 | allowed_attrs = markdown_attrs | |
274 |
|
274 | |||
275 | try: |
|
275 | try: | |
276 | return bleach.clean(text, tags=allowed_tags, attributes=allowed_attrs) |
|
276 | return bleach.clean(text, tags=allowed_tags, attributes=allowed_attrs) | |
277 | except Exception: |
|
277 | except Exception: | |
278 | return 'UNPARSEABLE TEXT' |
|
278 | return 'UNPARSEABLE TEXT' | |
279 |
|
279 | |||
280 | @classmethod |
|
280 | @classmethod | |
281 | def renderer_from_filename(cls, filename, exclude): |
|
281 | def renderer_from_filename(cls, filename, exclude): | |
282 | """ |
|
282 | """ | |
283 | Detect renderer markdown/rst from filename and optionally use exclude |
|
283 | Detect renderer markdown/rst from filename and optionally use exclude | |
284 | list to remove some options. This is mostly used in helpers. |
|
284 | list to remove some options. This is mostly used in helpers. | |
285 | Returns None when no renderer can be detected. |
|
285 | Returns None when no renderer can be detected. | |
286 | """ |
|
286 | """ | |
287 | def _filter(elements): |
|
287 | def _filter(elements): | |
288 | if isinstance(exclude, (list, tuple)): |
|
288 | if isinstance(exclude, (list, tuple)): | |
289 | return [x for x in elements if x not in exclude] |
|
289 | return [x for x in elements if x not in exclude] | |
290 | return elements |
|
290 | return elements | |
291 |
|
291 | |||
292 | if filename.endswith( |
|
292 | if filename.endswith( | |
293 | tuple(_filter([x[0] for x in cls.MARKDOWN_EXTS if x[0]]))): |
|
293 | tuple(_filter([x[0] for x in cls.MARKDOWN_EXTS if x[0]]))): | |
294 | return 'markdown' |
|
294 | return 'markdown' | |
295 | if filename.endswith(tuple(_filter([x[0] for x in cls.RST_EXTS if x[0]]))): |
|
295 | if filename.endswith(tuple(_filter([x[0] for x in cls.RST_EXTS if x[0]]))): | |
296 | return 'rst' |
|
296 | return 'rst' | |
297 |
|
297 | |||
298 | return None |
|
298 | return None | |
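The results below follow directly from the extension tables above; the calls themselves are an illustrative sketch:

# detection is driven purely by the MARKDOWN_EXTS / RST_EXTS tables above
MarkupRenderer.renderer_from_filename('README.md', exclude=None)       # -> 'markdown'
MarkupRenderer.renderer_from_filename('docs/index.rst', exclude=None)  # -> 'rst'
# the exclude list removes candidate extensions before matching
MarkupRenderer.renderer_from_filename('README.md', exclude=['.md'])    # -> None
MarkupRenderer.renderer_from_filename('setup.py', exclude=None)        # -> None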
299 |
|
299 | |||
300 | def render(self, source, filename=None): |
|
300 | def render(self, source, filename=None): | |
301 | """ |
|
301 | """ | |
302 | Renders the given source using an automatically detected renderer; |
|
302 | Renders the given source using an automatically detected renderer; | |
303 | the renderer is detected based on file extension or mimetype. |
|
303 | the renderer is detected based on file extension or mimetype. | |
304 | As a last resort it falls back to simple HTML, replacing new lines with <br/> |
|
304 | As a last resort it falls back to simple HTML, replacing new lines with <br/> | |
305 |
|
305 | |||
306 | :param filename: |
|
306 | :param filename: | |
307 | :param source: |
|
307 | :param source: | |
308 | """ |
|
308 | """ | |
309 |
|
309 | |||
310 | renderer = self._detect_renderer(source, filename) |
|
310 | renderer = self._detect_renderer(source, filename) | |
311 | readme_data = renderer(source) |
|
311 | readme_data = renderer(source) | |
312 | return readme_data |
|
312 | return readme_data | |
313 |
|
313 | |||
314 | @classmethod |
|
314 | @classmethod | |
315 | def _flavored_markdown(cls, text): |
|
315 | def _flavored_markdown(cls, text): | |
316 | """ |
|
316 | """ | |
317 | Github style flavored markdown |
|
317 | Github style flavored markdown | |
318 |
|
318 | |||
319 | :param text: |
|
319 | :param text: | |
320 | """ |
|
320 | """ | |
321 |
|
321 | |||
322 | # Extract pre blocks. |
|
322 | # Extract pre blocks. | |
323 | extractions = {} |
|
323 | extractions = {} | |
324 |
|
324 | |||
325 | def pre_extraction_callback(matchobj): |
|
325 | def pre_extraction_callback(matchobj): | |
326 | digest = md5_safe(matchobj.group(0)) |
|
326 | digest = md5_safe(matchobj.group(0)) | |
327 | extractions[digest] = matchobj.group(0) |
|
327 | extractions[digest] = matchobj.group(0) | |
328 | return "{gfm-extraction-%s}" % digest |
|
328 | return "{gfm-extraction-%s}" % digest | |
329 | pattern = re.compile(r'<pre>.*?</pre>', re.MULTILINE | re.DOTALL) |
|
329 | pattern = re.compile(r'<pre>.*?</pre>', re.MULTILINE | re.DOTALL) | |
330 | text = re.sub(pattern, pre_extraction_callback, text) |
|
330 | text = re.sub(pattern, pre_extraction_callback, text) | |
331 |
|
331 | |||
332 | # Prevent foo_bar_baz from ending up with an italic word in the middle. |
|
332 | # Prevent foo_bar_baz from ending up with an italic word in the middle. | |
333 | def italic_callback(matchobj): |
|
333 | def italic_callback(matchobj): | |
334 | s = matchobj.group(0) |
|
334 | s = matchobj.group(0) | |
335 | if list(s).count('_') >= 2: |
|
335 | if list(s).count('_') >= 2: | |
336 | return s.replace('_', r'\_') |
|
336 | return s.replace('_', r'\_') | |
337 | return s |
|
337 | return s | |
338 | text = re.sub(r'^(?! {4}|\t)\w+_\w+_\w[\w_]*', italic_callback, text) |
|
338 | text = re.sub(r'^(?! {4}|\t)\w+_\w+_\w[\w_]*', italic_callback, text) | |
339 |
|
339 | |||
340 | # Insert pre block extractions. |
|
340 | # Insert pre block extractions. | |
341 | def pre_insert_callback(matchobj): |
|
341 | def pre_insert_callback(matchobj): | |
342 | return '\n\n' + extractions[matchobj.group(1)] |
|
342 | return '\n\n' + extractions[matchobj.group(1)] | |
343 | text = re.sub(r'\{gfm-extraction-([0-9a-f]{32})\}', |
|
343 | text = re.sub(r'\{gfm-extraction-([0-9a-f]{32})\}', | |
344 | pre_insert_callback, text) |
|
344 | pre_insert_callback, text) | |
345 |
|
345 | |||
346 | return text |
|
346 | return text | |
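A small sketch of what the GFM pre-processing above does (underscore escaping and <pre> shielding); the exact whitespace in the output is approximate:

# words like foo_bar_baz at the start of the text get their underscores
# escaped so they do not render as italics
MarkupRenderer._flavored_markdown('foo_bar_baz stays literal')
# -> 'foo\_bar\_baz stays literal'

# <pre> blocks are extracted before processing and re-inserted unchanged
MarkupRenderer._flavored_markdown('<pre>keep_this_as_is</pre>')
# -> the original <pre> block, prefixed with a blank line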
347 |
|
347 | |||
348 | @classmethod |
|
348 | @classmethod | |
349 | def urlify_text(cls, text): |
|
349 | def urlify_text(cls, text): | |
350 | def url_func(match_obj): |
|
350 | def url_func(match_obj): | |
351 | url_full = match_obj.groups()[0] |
|
351 | url_full = match_obj.groups()[0] | |
352 | return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full}) |
|
352 | return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full}) | |
353 |
|
353 | |||
354 | return cls.URL_PAT.sub(url_func, text) |
|
354 | return cls.URL_PAT.sub(url_func, text) | |
355 |
|
355 | |||
356 | @classmethod |
|
356 | @classmethod | |
357 | def convert_mentions(cls, text, mode): |
|
357 | def convert_mentions(cls, text, mode): | |
358 | mention_pat = cls.MENTION_PAT |
|
358 | mention_pat = cls.MENTION_PAT | |
359 |
|
359 | |||
360 | def wrapp(match_obj): |
|
360 | def wrapp(match_obj): | |
361 | uname = match_obj.groups()[0] |
|
361 | uname = match_obj.groups()[0] | |
362 | hovercard_url = "pyroutes.url('hovercard_username', {'username': '%s'});" % uname |
|
362 | hovercard_url = "pyroutes.url('hovercard_username', {'username': '%s'});" % uname | |
363 |
|
363 | |||
364 | if mode == 'markdown': |
|
364 | if mode == 'markdown': | |
365 | tmpl = '<strong class="tooltip-hovercard" data-hovercard-alt="{uname}" data-hovercard-url="{hovercard_url}">@{uname}</strong>' |
|
365 | tmpl = '<strong class="tooltip-hovercard" data-hovercard-alt="{uname}" data-hovercard-url="{hovercard_url}">@{uname}</strong>' | |
366 | elif mode == 'rst': |
|
366 | elif mode == 'rst': | |
367 | tmpl = ' **@{uname}** ' |
|
367 | tmpl = ' **@{uname}** ' | |
368 | else: |
|
368 | else: | |
369 | raise ValueError('mode must be rst or markdown') |
|
369 | raise ValueError('mode must be rst or markdown') | |
370 |
|
370 | |||
371 | return tmpl.format(**{'uname': uname, |
|
371 | return tmpl.format(**{'uname': uname, | |
372 | 'hovercard_url': hovercard_url}) |
|
372 | 'hovercard_url': hovercard_url}) | |
373 |
|
373 | |||
374 | return mention_pat.sub(wrapp, text).strip() |
|
374 | return mention_pat.sub(wrapp, text).strip() | |
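A hedged sketch of the mention conversion above; the exact matching depends on MENTIONS_REGEX (defined elsewhere), but the two templates produce output along these lines:

# rst mode wraps the username in bold, markdown mode in a hovercard <strong> tag
MarkupRenderer.convert_mentions('ping @marcin about the release', mode='rst')
# -> roughly 'ping  **@marcin**  about the release'
MarkupRenderer.convert_mentions('ping @marcin', mode='markdown')
# -> '@marcin' wrapped in <strong class="tooltip-hovercard" data-hovercard-url=...>
MarkupRenderer.convert_mentions('ping @marcin', mode='html')
# -> ValueError: mode must be rst or markdown (only raised when a mention matches)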
375 |
|
375 | |||
376 | @classmethod |
|
376 | @classmethod | |
377 | def plain(cls, source, universal_newline=True, leading_newline=True): |
|
377 | def plain(cls, source, universal_newline=True, leading_newline=True): | |
378 | source = safe_unicode(source) |
|
378 | source = safe_unicode(source) | |
379 | if universal_newline: |
|
379 | if universal_newline: | |
380 | newline = '\n' |
|
380 | newline = '\n' | |
381 | source = newline.join(source.splitlines()) |
|
381 | source = newline.join(source.splitlines()) | |
382 |
|
382 | |||
383 | rendered_source = cls.urlify_text(source) |
|
383 | rendered_source = cls.urlify_text(source) | |
384 | source = '' |
|
384 | source = '' | |
385 | if leading_newline: |
|
385 | if leading_newline: | |
386 | source += '<br />' |
|
386 | source += '<br />' | |
387 | source += rendered_source.replace("\n", '<br />') |
|
387 | source += rendered_source.replace("\n", '<br />') | |
388 |
|
388 | |||
389 | rendered = cls.bleach_clean(source) |
|
389 | rendered = cls.bleach_clean(source) | |
390 | return rendered |
|
390 | return rendered | |
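An approximate sketch of the plain renderer above: URLs get auto-linked, newlines become <br />, and the result still passes through bleach_clean:

MarkupRenderer.plain('See http://example.com for details')
# -> roughly '<br />See <a href="http://example.com">http://example.com</a> for details'
MarkupRenderer.plain('line one\nline two', leading_newline=False)
# -> roughly 'line one<br />line two'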
391 |
|
391 | |||
392 | @classmethod |
|
392 | @classmethod | |
393 | def markdown(cls, source, safe=True, flavored=True, mentions=False, |
|
393 | def markdown(cls, source, safe=True, flavored=True, mentions=False, | |
394 | clean_html=True): |
|
394 | clean_html=True): | |
395 | """ |
|
395 | """ | |
396 | returns markdown rendered code cleaned by the bleach library |
|
396 | returns markdown rendered code cleaned by the bleach library | |
397 | """ |
|
397 | """ | |
398 |
|
398 | |||
399 | if flavored: |
|
399 | if flavored: | |
400 | markdown_renderer = get_markdown_renderer_flavored( |
|
400 | markdown_renderer = get_markdown_renderer_flavored( | |
401 | cls.extensions, cls.output_format) |
|
401 | cls.extensions, cls.output_format) | |
402 | else: |
|
402 | else: | |
403 | markdown_renderer = get_markdown_renderer( |
|
403 | markdown_renderer = get_markdown_renderer( | |
404 | cls.extensions, cls.output_format) |
|
404 | cls.extensions, cls.output_format) | |
405 |
|
405 | |||
406 | if mentions: |
|
406 | if mentions: | |
407 | mention_hl = cls.convert_mentions(source, mode='markdown') |
|
407 | mention_hl = cls.convert_mentions(source, mode='markdown') | |
408 | # mentions are already converted above, so render again with mentions=False |
|
408 | # mentions are already converted above, so render again with mentions=False | |
409 | return cls.markdown(mention_hl, safe=safe, flavored=flavored, |
|
409 | return cls.markdown(mention_hl, safe=safe, flavored=flavored, | |
410 | mentions=False) |
|
410 | mentions=False) | |
411 |
|
411 | |||
412 | source = safe_unicode(source) |
|
412 | source = safe_unicode(source) | |
413 |
|
413 | |||
414 | try: |
|
414 | try: | |
415 | if flavored: |
|
415 | if flavored: | |
416 | source = cls._flavored_markdown(source) |
|
416 | source = cls._flavored_markdown(source) | |
417 | rendered = markdown_renderer.convert(source) |
|
417 | rendered = markdown_renderer.convert(source) | |
418 | except Exception: |
|
418 | except Exception: | |
419 | log.exception('Error when rendering Markdown') |
|
419 | log.exception('Error when rendering Markdown') | |
420 | if safe: |
|
420 | if safe: | |
421 | log.debug('Fallback to render in plain mode') |
|
421 | log.debug('Fallback to render in plain mode') | |
422 | rendered = cls.plain(source) |
|
422 | rendered = cls.plain(source) | |
423 | else: |
|
423 | else: | |
424 | raise |
|
424 | raise | |
425 |
|
425 | |||
426 | if clean_html: |
|
426 | if clean_html: | |
427 | rendered = cls.bleach_clean(rendered) |
|
427 | rendered = cls.bleach_clean(rendered) | |
428 | return rendered |
|
428 | return rendered | |
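A short, hedged usage sketch of the main markdown entry point above:

# default: GitHub-flavored rendering, bleach-cleaned output
html = MarkupRenderer.markdown('**bold** and a [link](http://example.com)')
# with mentions=True the source first goes through convert_mentions(),
# then is re-rendered with mentions disabled (see the recursion above)
html_m = MarkupRenderer.markdown('cc @marcin', mentions=True)
# safe=True (the default) falls back to the plain renderer if rendering fails;
# safe=False re-raises the original exception instead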
429 |
|
429 | |||
430 | @classmethod |
|
430 | @classmethod | |
431 | def rst(cls, source, safe=True, mentions=False, clean_html=False): |
|
431 | def rst(cls, source, safe=True, mentions=False, clean_html=False): | |
432 | if mentions: |
|
432 | if mentions: | |
433 | mention_hl = cls.convert_mentions(source, mode='rst') |
|
433 | mention_hl = cls.convert_mentions(source, mode='rst') | |
434 | # mentions are already converted above, so render again with mentions=False |
|
434 | # mentions are already converted above, so render again with mentions=False | |
435 | return cls.rst(mention_hl, safe=safe, mentions=False) |
|
435 | return cls.rst(mention_hl, safe=safe, mentions=False) | |
436 |
|
436 | |||
437 | source = safe_unicode(source) |
|
437 | source = safe_unicode(source) | |
438 | try: |
|
438 | try: | |
439 | docutils_settings = dict( |
|
439 | docutils_settings = dict( | |
440 | [(alias, None) for alias in |
|
440 | [(alias, None) for alias in | |
441 | cls.RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES]) |
|
441 | cls.RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES]) | |
442 |
|
442 | |||
443 | docutils_settings.update({ |
|
443 | docutils_settings.update({ | |
444 | 'input_encoding': 'unicode', |
|
444 | 'input_encoding': 'unicode', | |
445 | 'report_level': 4, |
|
445 | 'report_level': 4, | |
446 | 'syntax_highlight': 'short', |
|
446 | 'syntax_highlight': 'short', | |
447 | }) |
|
447 | }) | |
448 |
|
448 | |||
449 | for k, v in docutils_settings.iteritems(): |
|
449 | for k, v in docutils_settings.items(): | |
450 | directives.register_directive(k, v) |
|
450 | directives.register_directive(k, v) | |
451 |
|
451 | |||
452 | parts = publish_parts(source=source, |
|
452 | parts = publish_parts(source=source, | |
453 | writer=RhodeCodeWriter(), |
|
453 | writer=RhodeCodeWriter(), | |
454 | settings_overrides=docutils_settings) |
|
454 | settings_overrides=docutils_settings) | |
455 | rendered = parts["fragment"] |
|
455 | rendered = parts["fragment"] | |
456 | if clean_html: |
|
456 | if clean_html: | |
457 | rendered = cls.bleach_clean(rendered) |
|
457 | rendered = cls.bleach_clean(rendered) | |
458 | return parts['html_title'] + rendered |
|
458 | return parts['html_title'] + rendered | |
459 | except Exception: |
|
459 | except Exception: | |
460 | log.exception('Error when rendering RST') |
|
460 | log.exception('Error when rendering RST') | |
461 | if safe: |
|
461 | if safe: | |
462 | log.debug('Fallback to render in plain mode') |
|
462 | log.debug('Fallback to render in plain mode') | |
463 | return cls.plain(source) |
|
463 | return cls.plain(source) | |
464 | else: |
|
464 | else: | |
465 | raise |
|
465 | raise | |
466 |
|
466 | |||
467 | @classmethod |
|
467 | @classmethod | |
468 | def jupyter(cls, source, safe=True): |
|
468 | def jupyter(cls, source, safe=True): | |
469 | from rhodecode.lib import helpers |
|
469 | from rhodecode.lib import helpers | |
470 |
|
470 | |||
471 | from traitlets.config import Config |
|
471 | from traitlets.config import Config | |
472 | import nbformat |
|
472 | import nbformat | |
473 | from nbconvert import HTMLExporter |
|
473 | from nbconvert import HTMLExporter | |
474 | from nbconvert.preprocessors import Preprocessor |
|
474 | from nbconvert.preprocessors import Preprocessor | |
475 |
|
475 | |||
476 | class CustomHTMLExporter(HTMLExporter): |
|
476 | class CustomHTMLExporter(HTMLExporter): | |
477 | def _template_file_default(self): |
|
477 | def _template_file_default(self): | |
478 | return 'basic' |
|
478 | return 'basic' | |
479 |
|
479 | |||
480 | class Sandbox(Preprocessor): |
|
480 | class Sandbox(Preprocessor): | |
481 |
|
481 | |||
482 | def preprocess(self, nb, resources): |
|
482 | def preprocess(self, nb, resources): | |
483 | sandbox_text = 'SandBoxed(IPython.core.display.Javascript object)' |
|
483 | sandbox_text = 'SandBoxed(IPython.core.display.Javascript object)' | |
484 | for cell in nb['cells']: |
|
484 | for cell in nb['cells']: | |
485 | if not safe: |
|
485 | if not safe: | |
486 | continue |
|
486 | continue | |
487 |
|
487 | |||
488 | if 'outputs' in cell: |
|
488 | if 'outputs' in cell: | |
489 | for cell_output in cell['outputs']: |
|
489 | for cell_output in cell['outputs']: | |
490 | if 'data' in cell_output: |
|
490 | if 'data' in cell_output: | |
491 | if 'application/javascript' in cell_output['data']: |
|
491 | if 'application/javascript' in cell_output['data']: | |
492 | cell_output['data']['text/plain'] = sandbox_text |
|
492 | cell_output['data']['text/plain'] = sandbox_text | |
493 | cell_output['data'].pop('application/javascript', None) |
|
493 | cell_output['data'].pop('application/javascript', None) | |
494 |
|
494 | |||
495 | if 'source' in cell and cell['cell_type'] == 'markdown': |
|
495 | if 'source' in cell and cell['cell_type'] == 'markdown': | |
496 | # sanitize similarly to the markdown renderer |
|
496 | # sanitize similarly to the markdown renderer | |
497 | cell['source'] = cls.bleach_clean(cell['source']) |
|
497 | cell['source'] = cls.bleach_clean(cell['source']) | |
498 |
|
498 | |||
499 | return nb, resources |
|
499 | return nb, resources | |
500 |
|
500 | |||
501 | def _sanitize_resources(input_resources): |
|
501 | def _sanitize_resources(input_resources): | |
502 | """ |
|
502 | """ | |
503 | Skip/sanitize some of the CSS generated and included in jupyter |
|
503 | Skip/sanitize some of the CSS generated and included in jupyter | |
504 | so it doesn't mess up the UI as much |
|
504 | so it doesn't mess up the UI as much | |
505 | """ |
|
505 | """ | |
506 |
|
506 | |||
507 | # TODO(marcink): probably we should replace this with whole custom |
|
507 | # TODO(marcink): probably we should replace this with whole custom | |
508 | # CSS set that doesn't screw up, but jupyter generated html has some |
|
508 | # CSS set that doesn't screw up, but jupyter generated html has some | |
509 | # special markers, so it requires Custom HTML exporter template with |
|
509 | # special markers, so it requires Custom HTML exporter template with | |
510 | # _default_template_path_default, to achieve that |
|
510 | # _default_template_path_default, to achieve that | |
511 |
|
511 | |||
512 | # strip the reset CSS |
|
512 | # strip the reset CSS | |
513 | input_resources[0] = input_resources[0][input_resources[0].find('/*! Source'):] |
|
513 | input_resources[0] = input_resources[0][input_resources[0].find('/*! Source'):] | |
514 | return input_resources |
|
514 | return input_resources | |
515 |
|
515 | |||
516 | def as_html(notebook): |
|
516 | def as_html(notebook): | |
517 | conf = Config() |
|
517 | conf = Config() | |
518 | conf.CustomHTMLExporter.preprocessors = [Sandbox] |
|
518 | conf.CustomHTMLExporter.preprocessors = [Sandbox] | |
519 | html_exporter = CustomHTMLExporter(config=conf) |
|
519 | html_exporter = CustomHTMLExporter(config=conf) | |
520 |
|
520 | |||
521 | (body, resources) = html_exporter.from_notebook_node(notebook) |
|
521 | (body, resources) = html_exporter.from_notebook_node(notebook) | |
522 | header = '<!-- ## IPYTHON NOTEBOOK RENDERING ## -->' |
|
522 | header = '<!-- ## IPYTHON NOTEBOOK RENDERING ## -->' | |
523 | js = MakoTemplate(r''' |
|
523 | js = MakoTemplate(r''' | |
524 | <!-- MathJax configuration --> |
|
524 | <!-- MathJax configuration --> | |
525 | <script type="text/x-mathjax-config"> |
|
525 | <script type="text/x-mathjax-config"> | |
526 | MathJax.Hub.Config({ |
|
526 | MathJax.Hub.Config({ | |
527 | jax: ["input/TeX","output/HTML-CSS", "output/PreviewHTML"], |
|
527 | jax: ["input/TeX","output/HTML-CSS", "output/PreviewHTML"], | |
528 | extensions: ["tex2jax.js","MathMenu.js","MathZoom.js", "fast-preview.js", "AssistiveMML.js", "[Contrib]/a11y/accessibility-menu.js"], |
|
528 | extensions: ["tex2jax.js","MathMenu.js","MathZoom.js", "fast-preview.js", "AssistiveMML.js", "[Contrib]/a11y/accessibility-menu.js"], | |
529 | TeX: { |
|
529 | TeX: { | |
530 | extensions: ["AMSmath.js","AMSsymbols.js","noErrors.js","noUndefined.js"] |
|
530 | extensions: ["AMSmath.js","AMSsymbols.js","noErrors.js","noUndefined.js"] | |
531 | }, |
|
531 | }, | |
532 | tex2jax: { |
|
532 | tex2jax: { | |
533 | inlineMath: [ ['$','$'], ["\\(","\\)"] ], |
|
533 | inlineMath: [ ['$','$'], ["\\(","\\)"] ], | |
534 | displayMath: [ ['$$','$$'], ["\\[","\\]"] ], |
|
534 | displayMath: [ ['$$','$$'], ["\\[","\\]"] ], | |
535 | processEscapes: true, |
|
535 | processEscapes: true, | |
536 | processEnvironments: true |
|
536 | processEnvironments: true | |
537 | }, |
|
537 | }, | |
538 | // Center justify equations in code and markdown cells. Elsewhere |
|
538 | // Center justify equations in code and markdown cells. Elsewhere | |
539 | // we use CSS to left justify single line equations in code cells. |
|
539 | // we use CSS to left justify single line equations in code cells. | |
540 | displayAlign: 'center', |
|
540 | displayAlign: 'center', | |
541 | "HTML-CSS": { |
|
541 | "HTML-CSS": { | |
542 | styles: {'.MathJax_Display': {"margin": 0}}, |
|
542 | styles: {'.MathJax_Display': {"margin": 0}}, | |
543 | linebreaks: { automatic: true }, |
|
543 | linebreaks: { automatic: true }, | |
544 | availableFonts: ["STIX", "TeX"] |
|
544 | availableFonts: ["STIX", "TeX"] | |
545 | }, |
|
545 | }, | |
546 | showMathMenu: false |
|
546 | showMathMenu: false | |
547 | }); |
|
547 | }); | |
548 | </script> |
|
548 | </script> | |
549 | <!-- End of MathJax configuration --> |
|
549 | <!-- End of MathJax configuration --> | |
550 | <script src="${h.asset('js/src/math_jax/MathJax.js')}"></script> |
|
550 | <script src="${h.asset('js/src/math_jax/MathJax.js')}"></script> | |
551 | ''').render(h=helpers) |
|
551 | ''').render(h=helpers) | |
552 |
|
552 | |||
553 | css = MakoTemplate(r''' |
|
553 | css = MakoTemplate(r''' | |
554 | <link rel="stylesheet" type="text/css" href="${h.asset('css/style-ipython.css', ver=ver)}" media="screen"/> |
|
554 | <link rel="stylesheet" type="text/css" href="${h.asset('css/style-ipython.css', ver=ver)}" media="screen"/> | |
555 | ''').render(h=helpers, ver='ver1') |
|
555 | ''').render(h=helpers, ver='ver1') | |
556 |
|
556 | |||
557 | body = '\n'.join([header, css, js, body]) |
|
557 | body = '\n'.join([header, css, js, body]) | |
558 | return body, resources |
|
558 | return body, resources | |
559 |
|
559 | |||
560 | notebook = nbformat.reads(source, as_version=4) |
|
560 | notebook = nbformat.reads(source, as_version=4) | |
561 | (body, resources) = as_html(notebook) |
|
561 | (body, resources) = as_html(notebook) | |
562 | return body |
|
562 | return body | |
563 |
|
563 | |||
564 |
|
564 | |||
565 | class RstTemplateRenderer(object): |
|
565 | class RstTemplateRenderer(object): | |
566 |
|
566 | |||
567 | def __init__(self): |
|
567 | def __init__(self): | |
568 | base = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) |
|
568 | base = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) | |
569 | rst_template_dirs = [os.path.join(base, 'templates', 'rst_templates')] |
|
569 | rst_template_dirs = [os.path.join(base, 'templates', 'rst_templates')] | |
570 | self.template_store = TemplateLookup( |
|
570 | self.template_store = TemplateLookup( | |
571 | directories=rst_template_dirs, |
|
571 | directories=rst_template_dirs, | |
572 | input_encoding='utf-8', |
|
572 | input_encoding='utf-8', | |
573 | imports=['from rhodecode.lib import helpers as h']) |
|
573 | imports=['from rhodecode.lib import helpers as h']) | |
574 |
|
574 | |||
575 | def _get_template(self, templatename): |
|
575 | def _get_template(self, templatename): | |
576 | return self.template_store.get_template(templatename) |
|
576 | return self.template_store.get_template(templatename) | |
577 |
|
577 | |||
578 | def render(self, template_name, **kwargs): |
|
578 | def render(self, template_name, **kwargs): | |
579 | template = self._get_template(template_name) |
|
579 | template = self._get_template(template_name) | |
580 | return template.render(**kwargs) |
|
580 | return template.render(**kwargs) |
@@ -1,156 +1,156 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | SimpleGit middleware for handling git protocol request (push/clone etc.) |
|
22 | SimpleGit middleware for handling git protocol request (push/clone etc.) | |
23 | It's implemented with basic auth function |
|
23 | It's implemented with basic auth function | |
24 | """ |
|
24 | """ | |
25 | import os |
|
25 | import os | |
26 | import re |
|
26 | import re | |
27 | import logging |
|
27 | import logging | |
28 | import urlparse |
|
28 | import urllib.parse | |
29 |
|
29 | |||
30 | import rhodecode |
|
30 | import rhodecode | |
31 | from rhodecode.lib import utils |
|
31 | from rhodecode.lib import utils | |
32 | from rhodecode.lib import utils2 |
|
32 | from rhodecode.lib import utils2 | |
33 | from rhodecode.lib.middleware import simplevcs |
|
33 | from rhodecode.lib.middleware import simplevcs | |
34 |
|
34 | |||
35 | log = logging.getLogger(__name__) |
|
35 | log = logging.getLogger(__name__) | |
36 |
|
36 | |||
37 |
|
37 | |||
38 | GIT_PROTO_PAT = re.compile( |
|
38 | GIT_PROTO_PAT = re.compile( | |
39 | r'^/(.+)/(info/refs|info/lfs/(.+)|git-upload-pack|git-receive-pack)') |
|
39 | r'^/(.+)/(info/refs|info/lfs/(.+)|git-upload-pack|git-receive-pack)') | |
40 | GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))') |
|
40 | GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))') | |
41 |
|
41 | |||
42 |
|
42 | |||
43 | def default_lfs_store(): |
|
43 | def default_lfs_store(): | |
44 | """ |
|
44 | """ | |
45 | Default LFS store location; it's consistent with Mercurial's large file |
|
45 | Default LFS store location; it's consistent with Mercurial's large file | |
46 | store which is in .cache/largefiles |
|
46 | store which is in .cache/largefiles | |
47 | """ |
|
47 | """ | |
48 | from rhodecode.lib.vcs.backends.git import lfs_store |
|
48 | from rhodecode.lib.vcs.backends.git import lfs_store | |
49 | user_home = os.path.expanduser("~") |
|
49 | user_home = os.path.expanduser("~") | |
50 | return lfs_store(user_home) |
|
50 | return lfs_store(user_home) | |
51 |
|
51 | |||
52 |
|
52 | |||
53 | class SimpleGit(simplevcs.SimpleVCS): |
|
53 | class SimpleGit(simplevcs.SimpleVCS): | |
54 |
|
54 | |||
55 | SCM = 'git' |
|
55 | SCM = 'git' | |
56 |
|
56 | |||
57 | def _get_repository_name(self, environ): |
|
57 | def _get_repository_name(self, environ): | |
58 | """ |
|
58 | """ | |
59 | Gets repository name out of PATH_INFO header |
|
59 | Gets repository name out of PATH_INFO header | |
60 |
|
60 | |||
61 | :param environ: environ where PATH_INFO is stored |
|
61 | :param environ: environ where PATH_INFO is stored | |
62 | """ |
|
62 | """ | |
63 | repo_name = GIT_PROTO_PAT.match(environ['PATH_INFO']).group(1) |
|
63 | repo_name = GIT_PROTO_PAT.match(environ['PATH_INFO']).group(1) | |
64 | # for Git LFS and bare format, strip the .git suffix from names |
|
64 | # for Git LFS and bare format, strip the .git suffix from names | |
65 | if repo_name.endswith('.git'): |
|
65 | if repo_name.endswith('.git'): | |
66 | repo_name = repo_name[:-4] |
|
66 | repo_name = repo_name[:-4] | |
67 | return repo_name |
|
67 | return repo_name | |
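An illustrative sketch of the name extraction above; `simple_git` stands in for a configured SimpleGit instance, and the environ dicts are trimmed down to the key that is read:

simple_git._get_repository_name({'PATH_INFO': '/my-repo.git/info/refs'})
# -> 'my-repo'   (.git suffix stripped, as used by LFS / bare-style clone URLs)
simple_git._get_repository_name({'PATH_INFO': '/group/my-repo/git-upload-pack'})
# -> 'group/my-repo'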
68 |
|
68 | |||
69 | def _get_lfs_action(self, path, request_method): |
|
69 | def _get_lfs_action(self, path, request_method): | |
70 | """ |
|
70 | """ | |
71 | return an action based on LFS requests type. |
|
71 | return an action based on LFS requests type. | |
72 | Those routes are handled inside vcsserver app. |
|
72 | Those routes are handled inside vcsserver app. | |
73 |
|
73 | |||
74 | batch -> POST to /info/lfs/objects/batch => PUSH/PULL |
|
74 | batch -> POST to /info/lfs/objects/batch => PUSH/PULL | |
75 | batch action is based on the `operation` field, |
|
75 | batch action is based on the `operation` field, | |
76 | which can be download or upload, but those are only |
|
76 | which can be download or upload, but those are only | |
77 | instructions to fetch, so we always return pull |
|
77 | instructions to fetch, so we always return pull | |
78 |
|
78 | |||
79 | download -> GET to /info/lfs/{oid} => PULL |
|
79 | download -> GET to /info/lfs/{oid} => PULL | |
80 | upload -> PUT to /info/lfs/{oid} => PUSH |
|
80 | upload -> PUT to /info/lfs/{oid} => PUSH | |
81 |
|
81 | |||
82 | verification -> POST to /info/lfs/verify => PULL |
|
82 | verification -> POST to /info/lfs/verify => PULL | |
83 |
|
83 | |||
84 | """ |
|
84 | """ | |
85 |
|
85 | |||
86 | match_obj = GIT_LFS_PROTO_PAT.match(path) |
|
86 | match_obj = GIT_LFS_PROTO_PAT.match(path) | |
87 | _parts = match_obj.groups() |
|
87 | _parts = match_obj.groups() | |
88 | repo_name, path, operation = _parts |
|
88 | repo_name, path, operation = _parts | |
89 | log.debug( |
|
89 | log.debug( | |
90 | 'LFS: detecting operation based on following ' |
|
90 | 'LFS: detecting operation based on following ' | |
91 | 'data: %s, req_method:%s', _parts, request_method) |
|
91 | 'data: %s, req_method:%s', _parts, request_method) | |
92 |
|
92 | |||
93 | if operation == 'verify': |
|
93 | if operation == 'verify': | |
94 | return 'pull' |
|
94 | return 'pull' | |
95 | elif operation == 'objects/batch': |
|
95 | elif operation == 'objects/batch': | |
96 | # batch sends back instructions for the API to download/upload; we report it |
|
96 | # batch sends back instructions for the API to download/upload; we report it | |
97 | # as pull |
|
97 | # as pull | |
98 | if request_method == 'POST': |
|
98 | if request_method == 'POST': | |
99 | return 'pull' |
|
99 | return 'pull' | |
100 |
|
100 | |||
101 | elif operation: |
|
101 | elif operation: | |
102 | # probably an OID; upload is PUT, download is GET |
|
102 | # probably an OID; upload is PUT, download is GET | |
103 | if request_method == 'GET': |
|
103 | if request_method == 'GET': | |
104 | return 'pull' |
|
104 | return 'pull' | |
105 | else: |
|
105 | else: | |
106 | return 'push' |
|
106 | return 'push' | |
107 |
|
107 | |||
108 | # if default not found require push, as action |
|
108 | # if default not found require push, as action | |
109 | return 'push' |
|
109 | return 'push' | |
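The mapping documented in the docstring above, spelled out as a sketch (`simple_git` is again a stand-in instance):

simple_git._get_lfs_action('/my-repo/info/lfs/objects/batch', 'POST')  # -> 'pull'
simple_git._get_lfs_action('/my-repo/info/lfs/verify', 'POST')         # -> 'pull'
simple_git._get_lfs_action('/my-repo/info/lfs/some-oid', 'GET')        # -> 'pull'
simple_git._get_lfs_action('/my-repo/info/lfs/some-oid', 'PUT')        # -> 'push'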
110 |
|
110 | |||
111 | _ACTION_MAPPING = { |
|
111 | _ACTION_MAPPING = { | |
112 | 'git-receive-pack': 'push', |
|
112 | 'git-receive-pack': 'push', | |
113 | 'git-upload-pack': 'pull', |
|
113 | 'git-upload-pack': 'pull', | |
114 | } |
|
114 | } | |
115 |
|
115 | |||
116 | def _get_action(self, environ): |
|
116 | def _get_action(self, environ): | |
117 | """ |
|
117 | """ | |
118 | Maps git request commands into a pull or push command. |
|
118 | Maps git request commands into a pull or push command. | |
119 | In case of unknown/unexpected data, it returns 'pull' to be safe. |
|
119 | In case of unknown/unexpected data, it returns 'pull' to be safe. | |
120 |
|
120 | |||
121 | :param environ: |
|
121 | :param environ: | |
122 | """ |
|
122 | """ | |
123 | path = environ['PATH_INFO'] |
|
123 | path = environ['PATH_INFO'] | |
124 |
|
124 | |||
125 | if path.endswith('/info/refs'): |
|
125 | if path.endswith('/info/refs'): | |
126 | query = urlparse.parse_qs(environ['QUERY_STRING']) |
|
126 | query = urllib.parse.parse_qs(environ['QUERY_STRING']) | |
127 | service_cmd = query.get('service', [''])[0] |
|
127 | service_cmd = query.get('service', [''])[0] | |
128 | return self._ACTION_MAPPING.get(service_cmd, 'pull') |
|
128 | return self._ACTION_MAPPING.get(service_cmd, 'pull') | |
129 |
|
129 | |||
130 | elif GIT_LFS_PROTO_PAT.match(environ['PATH_INFO']): |
|
130 | elif GIT_LFS_PROTO_PAT.match(environ['PATH_INFO']): | |
131 | return self._get_lfs_action( |
|
131 | return self._get_lfs_action( | |
132 | environ['PATH_INFO'], environ['REQUEST_METHOD']) |
|
132 | environ['PATH_INFO'], environ['REQUEST_METHOD']) | |
133 |
|
133 | |||
134 | elif path.endswith('/git-receive-pack'): |
|
134 | elif path.endswith('/git-receive-pack'): | |
135 | return 'push' |
|
135 | return 'push' | |
136 | elif path.endswith('/git-upload-pack'): |
|
136 | elif path.endswith('/git-upload-pack'): | |
137 | return 'pull' |
|
137 | return 'pull' | |
138 |
|
138 | |||
139 | return 'pull' |
|
139 | return 'pull' | |
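A minimal sketch of the pull/push detection above (environ dicts reduced to the keys that are read):

simple_git._get_action({'PATH_INFO': '/my-repo/info/refs',
                        'QUERY_STRING': 'service=git-receive-pack'})  # -> 'push'
simple_git._get_action({'PATH_INFO': '/my-repo/info/refs',
                        'QUERY_STRING': 'service=git-upload-pack'})   # -> 'pull'
simple_git._get_action({'PATH_INFO': '/my-repo/git-receive-pack',
                        'QUERY_STRING': ''})                          # -> 'push'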
140 |
|
140 | |||
141 | def _create_wsgi_app(self, repo_path, repo_name, config): |
|
141 | def _create_wsgi_app(self, repo_path, repo_name, config): | |
142 | return self.scm_app.create_git_wsgi_app( |
|
142 | return self.scm_app.create_git_wsgi_app( | |
143 | repo_path, repo_name, config) |
|
143 | repo_path, repo_name, config) | |
144 |
|
144 | |||
145 | def _create_config(self, extras, repo_name, scheme='http'): |
|
145 | def _create_config(self, extras, repo_name, scheme='http'): | |
146 | extras['git_update_server_info'] = utils2.str2bool( |
|
146 | extras['git_update_server_info'] = utils2.str2bool( | |
147 | rhodecode.CONFIG.get('git_update_server_info')) |
|
147 | rhodecode.CONFIG.get('git_update_server_info')) | |
148 |
|
148 | |||
149 | config = utils.make_db_config(repo=repo_name) |
|
149 | config = utils.make_db_config(repo=repo_name) | |
150 | custom_store = config.get('vcs_git_lfs', 'store_location') |
|
150 | custom_store = config.get('vcs_git_lfs', 'store_location') | |
151 |
|
151 | |||
152 | extras['git_lfs_enabled'] = utils2.str2bool( |
|
152 | extras['git_lfs_enabled'] = utils2.str2bool( | |
153 | config.get('vcs_git_lfs', 'enabled')) |
|
153 | config.get('vcs_git_lfs', 'enabled')) | |
154 | extras['git_lfs_store_path'] = custom_store or default_lfs_store() |
|
154 | extras['git_lfs_store_path'] = custom_store or default_lfs_store() | |
155 | extras['git_lfs_http_scheme'] = scheme |
|
155 | extras['git_lfs_http_scheme'] = scheme | |
156 | return extras |
|
156 | return extras |
@@ -1,160 +1,160 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | SimpleHG middleware for handling mercurial protocol request |
|
22 | SimpleHG middleware for handling mercurial protocol request | |
23 | (push/clone etc.). It's implemented with basic auth function |
|
23 | (push/clone etc.). It's implemented with basic auth function | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 | import logging |
|
26 | import logging | |
27 | import urlparse |
|
27 | import urllib.parse | |
28 | import urllib.request, urllib.parse, urllib.error |
|
28 | import urllib.request, urllib.parse, urllib.error | |
29 |
|
29 | |||
30 | from rhodecode.lib import utils |
|
30 | from rhodecode.lib import utils | |
31 | from rhodecode.lib.ext_json import json |
|
31 | from rhodecode.lib.ext_json import json | |
32 | from rhodecode.lib.middleware import simplevcs |
|
32 | from rhodecode.lib.middleware import simplevcs | |
33 |
|
33 | |||
34 | log = logging.getLogger(__name__) |
|
34 | log = logging.getLogger(__name__) | |
35 |
|
35 | |||
36 |
|
36 | |||
37 | class SimpleHg(simplevcs.SimpleVCS): |
|
37 | class SimpleHg(simplevcs.SimpleVCS): | |
38 |
|
38 | |||
39 | SCM = 'hg' |
|
39 | SCM = 'hg' | |
40 |
|
40 | |||
41 | def _get_repository_name(self, environ): |
|
41 | def _get_repository_name(self, environ): | |
42 | """ |
|
42 | """ | |
43 | Gets repository name out of PATH_INFO header |
|
43 | Gets repository name out of PATH_INFO header | |
44 |
|
44 | |||
45 | :param environ: environ where PATH_INFO is stored |
|
45 | :param environ: environ where PATH_INFO is stored | |
46 | """ |
|
46 | """ | |
47 | repo_name = environ['PATH_INFO'] |
|
47 | repo_name = environ['PATH_INFO'] | |
48 | if repo_name and repo_name.startswith('/'): |
|
48 | if repo_name and repo_name.startswith('/'): | |
49 | # remove only the first leading / |
|
49 | # remove only the first leading / | |
50 | repo_name = repo_name[1:] |
|
50 | repo_name = repo_name[1:] | |
51 | return repo_name.rstrip('/') |
|
51 | return repo_name.rstrip('/') | |
52 |
|
52 | |||
53 | _ACTION_MAPPING = { |
|
53 | _ACTION_MAPPING = { | |
54 | 'changegroup': 'pull', |
|
54 | 'changegroup': 'pull', | |
55 | 'changegroupsubset': 'pull', |
|
55 | 'changegroupsubset': 'pull', | |
56 | 'getbundle': 'pull', |
|
56 | 'getbundle': 'pull', | |
57 | 'stream_out': 'pull', |
|
57 | 'stream_out': 'pull', | |
58 | 'listkeys': 'pull', |
|
58 | 'listkeys': 'pull', | |
59 | 'between': 'pull', |
|
59 | 'between': 'pull', | |
60 | 'branchmap': 'pull', |
|
60 | 'branchmap': 'pull', | |
61 | 'branches': 'pull', |
|
61 | 'branches': 'pull', | |
62 | 'clonebundles': 'pull', |
|
62 | 'clonebundles': 'pull', | |
63 | 'capabilities': 'pull', |
|
63 | 'capabilities': 'pull', | |
64 | 'debugwireargs': 'pull', |
|
64 | 'debugwireargs': 'pull', | |
65 | 'heads': 'pull', |
|
65 | 'heads': 'pull', | |
66 | 'lookup': 'pull', |
|
66 | 'lookup': 'pull', | |
67 | 'hello': 'pull', |
|
67 | 'hello': 'pull', | |
68 | 'known': 'pull', |
|
68 | 'known': 'pull', | |
69 |
|
69 | |||
70 | # largefiles |
|
70 | # largefiles | |
71 | 'putlfile': 'push', |
|
71 | 'putlfile': 'push', | |
72 | 'getlfile': 'pull', |
|
72 | 'getlfile': 'pull', | |
73 | 'statlfile': 'pull', |
|
73 | 'statlfile': 'pull', | |
74 | 'lheads': 'pull', |
|
74 | 'lheads': 'pull', | |
75 |
|
75 | |||
76 | # evolve |
|
76 | # evolve | |
77 | 'evoext_obshashrange_v1': 'pull', |
|
77 | 'evoext_obshashrange_v1': 'pull', | |
78 | 'evoext_obshash': 'pull', |
|
78 | 'evoext_obshash': 'pull', | |
79 | 'evoext_obshash1': 'pull', |
|
79 | 'evoext_obshash1': 'pull', | |
80 |
|
80 | |||
81 | 'unbundle': 'push', |
|
81 | 'unbundle': 'push', | |
82 | 'pushkey': 'push', |
|
82 | 'pushkey': 'push', | |
83 | } |
|
83 | } | |
84 |
|
84 | |||
85 | @classmethod |
|
85 | @classmethod | |
86 | def _get_xarg_headers(cls, environ): |
|
86 | def _get_xarg_headers(cls, environ): | |
87 | i = 1 |
|
87 | i = 1 | |
88 | chunks = [] # gather chunks stored in multiple 'hgarg_N' |
|
88 | chunks = [] # gather chunks stored in multiple 'hgarg_N' | |
89 | while True: |
|
89 | while True: | |
90 | head = environ.get('HTTP_X_HGARG_{}'.format(i)) |
|
90 | head = environ.get('HTTP_X_HGARG_{}'.format(i)) | |
91 | if not head: |
|
91 | if not head: | |
92 | break |
|
92 | break | |
93 | i += 1 |
|
93 | i += 1 | |
94 | chunks.append(urllib.parse.unquote_plus(head)) |
|
94 | chunks.append(urllib.parse.unquote_plus(head)) | |
95 | full_arg = ''.join(chunks) |
|
95 | full_arg = ''.join(chunks) | |
96 | pref = 'cmds=' |
|
96 | pref = 'cmds=' | |
97 | if full_arg.startswith(pref): |
|
97 | if full_arg.startswith(pref): | |
98 | # strip the cmds= header defining our batch commands |
|
98 | # strip the cmds= header defining our batch commands | |
99 | full_arg = full_arg[len(pref):] |
|
99 | full_arg = full_arg[len(pref):] | |
100 | cmds = full_arg.split(';') |
|
100 | cmds = full_arg.split(';') | |
101 | return cmds |
|
101 | return cmds | |
102 |
|
102 | |||
103 | @classmethod |
|
103 | @classmethod | |
104 | def _get_batch_cmd(cls, environ): |
|
104 | def _get_batch_cmd(cls, environ): | |
105 | """ |
|
105 | """ | |
106 | Handle commands sent via the batch command. Those are ';' separated commands |
|
106 | Handle commands sent via the batch command. Those are ';' separated commands | |
107 | that the server needs to execute. We need to extract |
|
107 | that the server needs to execute. We need to extract | |
108 | those, and map them to our ACTION_MAPPING to get all push/pull commands |
|
108 | those, and map them to our ACTION_MAPPING to get all push/pull commands | |
109 | specified in the batch |
|
109 | specified in the batch | |
110 | """ |
|
110 | """ | |
111 | default = 'push' |
|
111 | default = 'push' | |
112 | batch_cmds = [] |
|
112 | batch_cmds = [] | |
113 | try: |
|
113 | try: | |
114 | cmds = cls._get_xarg_headers(environ) |
|
114 | cmds = cls._get_xarg_headers(environ) | |
115 | for pair in cmds: |
|
115 | for pair in cmds: | |
116 | parts = pair.split(' ', 1) |
|
116 | parts = pair.split(' ', 1) | |
117 | if len(parts) != 2: |
|
117 | if len(parts) != 2: | |
118 | continue |
|
118 | continue | |
119 | # entry should be in a format `key ARGS` |
|
119 | # entry should be in a format `key ARGS` | |
120 | cmd, args = parts |
|
120 | cmd, args = parts | |
121 | action = cls._ACTION_MAPPING.get(cmd, default) |
|
121 | action = cls._ACTION_MAPPING.get(cmd, default) | |
122 | batch_cmds.append(action) |
|
122 | batch_cmds.append(action) | |
123 | except Exception: |
|
123 | except Exception: | |
124 | log.exception('Failed to extract batch commands operations') |
|
124 | log.exception('Failed to extract batch commands operations') | |
125 |
|
125 | |||
126 | # in case we failed (e.g. malformed data), assume it's a PUSH sub-command |
|
126 | # in case we failed (e.g. malformed data), assume it's a PUSH sub-command | |
127 | # for safety |
|
127 | # for safety | |
128 | return batch_cmds or [default] |
|
128 | return batch_cmds or [default] | |
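A sketch of the batch extraction above; the header value is made up, but the mapping follows _ACTION_MAPPING:

environ = {'HTTP_X_HGARG_1': 'cmds=heads ;known nodes='}
SimpleHg._get_batch_cmd(environ)
# -> ['pull', 'pull']   ('heads' and 'known' both map to pull)
# a batch containing e.g. 'unbundle' would contribute a 'push' entry,
# and any parsing failure falls back to ['push'] for safety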
129 |
|
129 | |||
130 | def _get_action(self, environ): |
|
130 | def _get_action(self, environ): | |
131 | """ |
|
131 | """ | |
132 | Maps mercurial request commands into a pull or push command. |
|
132 | Maps mercurial request commands into a pull or push command. | |
133 | In case of unknown/unexpected data, it returns 'push' to be safe. |
|
133 | In case of unknown/unexpected data, it returns 'push' to be safe. | |
134 |
|
134 | |||
135 | :param environ: |
|
135 | :param environ: | |
136 | """ |
|
136 | """ | |
137 | default = 'push' |
|
137 | default = 'push' | |
138 | query = urlparse.parse_qs(environ['QUERY_STRING'], |
|
138 | query = urllib.parse.parse_qs(environ['QUERY_STRING'], | |
139 | keep_blank_values=True) |
|
139 | keep_blank_values=True) | |
140 |
|
140 | |||
141 | if 'cmd' in query: |
|
141 | if 'cmd' in query: | |
142 | cmd = query['cmd'][0] |
|
142 | cmd = query['cmd'][0] | |
143 | if cmd == 'batch': |
|
143 | if cmd == 'batch': | |
144 | cmds = self._get_batch_cmd(environ) |
|
144 | cmds = self._get_batch_cmd(environ) | |
145 | if 'push' in cmds: |
|
145 | if 'push' in cmds: | |
146 | return 'push' |
|
146 | return 'push' | |
147 | else: |
|
147 | else: | |
148 | return 'pull' |
|
148 | return 'pull' | |
149 | return self._ACTION_MAPPING.get(cmd, default) |
|
149 | return self._ACTION_MAPPING.get(cmd, default) | |
150 |
|
150 | |||
151 | return default |
|
151 | return default | |
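And the top-level command mapping, sketched the same way (`simple_hg` is a stand-in instance):

simple_hg._get_action({'QUERY_STRING': 'cmd=getbundle'})  # -> 'pull'
simple_hg._get_action({'QUERY_STRING': 'cmd=unbundle'})   # -> 'push'
simple_hg._get_action({'QUERY_STRING': ''})               # -> 'push' (safe default)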
152 |
|
152 | |||
153 | def _create_wsgi_app(self, repo_path, repo_name, config): |
|
153 | def _create_wsgi_app(self, repo_path, repo_name, config): | |
154 | return self.scm_app.create_hg_wsgi_app(repo_path, repo_name, config) |
|
154 | return self.scm_app.create_hg_wsgi_app(repo_path, repo_name, config) | |
155 |
|
155 | |||
156 | def _create_config(self, extras, repo_name, scheme='http'): |
|
156 | def _create_config(self, extras, repo_name, scheme='http'): | |
157 | config = utils.make_db_config(repo=repo_name) |
|
157 | config = utils.make_db_config(repo=repo_name) | |
158 | config.set('rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
158 | config.set('rhodecode', 'RC_SCM_DATA', json.dumps(extras)) | |
159 |
|
159 | |||
160 | return config.serialize() |
|
160 | return config.serialize() |
@@ -1,229 +1,229 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import base64 |
|
21 | import base64 | |
22 | import logging |
|
22 | import logging | |
23 | import urllib.request, urllib.parse, urllib.error |
|
23 | import urllib.request, urllib.parse, urllib.error | |
24 | import urlparse |
|
24 | import urllib.parse | |
25 |
|
25 | |||
26 | import requests |
|
26 | import requests | |
27 | from pyramid.httpexceptions import HTTPNotAcceptable |
|
27 | from pyramid.httpexceptions import HTTPNotAcceptable | |
28 |
|
28 | |||
29 | from rhodecode.lib import rc_cache |
|
29 | from rhodecode.lib import rc_cache | |
30 | from rhodecode.lib.middleware import simplevcs |
|
30 | from rhodecode.lib.middleware import simplevcs | |
31 | from rhodecode.lib.utils import is_valid_repo |
|
31 | from rhodecode.lib.utils import is_valid_repo | |
32 | from rhodecode.lib.utils2 import str2bool, safe_int, safe_str |
|
32 | from rhodecode.lib.utils2 import str2bool, safe_int, safe_str | |
33 | from rhodecode.lib.ext_json import json |
|
33 | from rhodecode.lib.ext_json import json | |
34 | from rhodecode.lib.hooks_daemon import store_txn_id_data |
|
34 | from rhodecode.lib.hooks_daemon import store_txn_id_data | |
35 |
|
35 | |||
36 |
|
36 | |||
37 | log = logging.getLogger(__name__) |
|
37 | log = logging.getLogger(__name__) | |
38 |
|
38 | |||
39 |
|
39 | |||
40 | class SimpleSvnApp(object): |
|
40 | class SimpleSvnApp(object): | |
41 | IGNORED_HEADERS = [ |
|
41 | IGNORED_HEADERS = [ | |
42 | 'connection', 'keep-alive', 'content-encoding', |
|
42 | 'connection', 'keep-alive', 'content-encoding', | |
43 | 'transfer-encoding', 'content-length'] |
|
43 | 'transfer-encoding', 'content-length'] | |
44 | rc_extras = {} |
|
44 | rc_extras = {} | |
45 |
|
45 | |||
46 | def __init__(self, config): |
|
46 | def __init__(self, config): | |
47 | self.config = config |
|
47 | self.config = config | |
48 |
|
48 | |||
49 | def __call__(self, environ, start_response): |
|
49 | def __call__(self, environ, start_response): | |
50 | request_headers = self._get_request_headers(environ) |
|
50 | request_headers = self._get_request_headers(environ) | |
51 | data = environ['wsgi.input'] |
|
51 | data = environ['wsgi.input'] | |
52 | req_method = environ['REQUEST_METHOD'] |
|
52 | req_method = environ['REQUEST_METHOD'] | |
53 | has_content_length = 'CONTENT_LENGTH' in environ |
|
53 | has_content_length = 'CONTENT_LENGTH' in environ | |
54 | path_info = self._get_url( |
|
54 | path_info = self._get_url( | |
55 | self.config.get('subversion_http_server_url', ''), environ['PATH_INFO']) |
|
55 | self.config.get('subversion_http_server_url', ''), environ['PATH_INFO']) | |
56 | transfer_encoding = environ.get('HTTP_TRANSFER_ENCODING', '') |
|
56 | transfer_encoding = environ.get('HTTP_TRANSFER_ENCODING', '') | |
57 | log.debug('Handling: %s method via `%s`', req_method, path_info) |
|
57 | log.debug('Handling: %s method via `%s`', req_method, path_info) | |
58 |
|
58 | |||
59 | # stream control flag, based on request and content type... |
|
59 | # stream control flag, based on request and content type... | |
60 | stream = False |
|
60 | stream = False | |
61 |
|
61 | |||
62 | if req_method in ['MKCOL'] or has_content_length: |
|
62 | if req_method in ['MKCOL'] or has_content_length: | |
63 | data_processed = False |
|
63 | data_processed = False | |
64 | # read chunk to check if we have txn-with-props |
|
64 | # read chunk to check if we have txn-with-props | |
65 | initial_data = data.read(1024) |
|
65 | initial_data = data.read(1024) | |
66 | if initial_data.startswith('(create-txn-with-props'): |
|
66 | if initial_data.startswith('(create-txn-with-props'): | |
67 | data = initial_data + data.read() |
|
67 | data = initial_data + data.read() | |
68 | # store on-the-fly our rc_extra using svn revision properties |
|
68 | # store on-the-fly our rc_extra using svn revision properties | |
69 | # those can be read later on in hooks executed so we have a way |
|
69 | # those can be read later on in hooks executed so we have a way | |
70 | # to pass in the data into svn hooks |
|
70 | # to pass in the data into svn hooks | |
71 | rc_data = base64.urlsafe_b64encode(json.dumps(self.rc_extras)) |
|
71 | rc_data = base64.urlsafe_b64encode(json.dumps(self.rc_extras)) | |
72 | rc_data_len = len(rc_data) |
|
72 | rc_data_len = len(rc_data) | |
73 | # header defines data length, and serialized data |
|
73 | # header defines data length, and serialized data | |
74 | skel = ' rc-scm-extras {} {}'.format(rc_data_len, rc_data) |
|
74 | skel = ' rc-scm-extras {} {}'.format(rc_data_len, rc_data) | |
75 | data = data[:-2] + skel + '))' |
|
75 | data = data[:-2] + skel + '))' | |
76 | data_processed = True |
|
76 | data_processed = True | |
77 |
|
77 | |||
78 | if not data_processed: |
|
78 | if not data_processed: | |
79 | # NOTE(johbo): Avoid that we end up with sending the request in chunked |
|
79 | # NOTE(johbo): Avoid that we end up with sending the request in chunked | |
80 | # transfer encoding (mainly on Gunicorn). If we know the content |
|
80 | # transfer encoding (mainly on Gunicorn). If we know the content | |
81 | # length, then we should transfer the payload in one request. |
|
81 | # length, then we should transfer the payload in one request. | |
82 | data = initial_data + data.read() |
|
82 | data = initial_data + data.read() | |
83 |
|
83 | |||
84 | if req_method in ['GET', 'PUT'] or transfer_encoding == 'chunked': |
|
84 | if req_method in ['GET', 'PUT'] or transfer_encoding == 'chunked': | |
85 | # NOTE(marcink): when getting/uploading files we want to STREAM content |
|
85 | # NOTE(marcink): when getting/uploading files we want to STREAM content | |
86 | # back to the client/proxy instead of buffering it here... |
|
86 | # back to the client/proxy instead of buffering it here... | |
87 | stream = True |
|
87 | stream = True | |
88 |
|
88 | |||
89 | stream = stream |
|
89 | stream = stream | |
90 | log.debug('Calling SVN PROXY at `%s`, using method:%s. Stream: %s', |
|
90 | log.debug('Calling SVN PROXY at `%s`, using method:%s. Stream: %s', | |
91 | path_info, req_method, stream) |
|
91 | path_info, req_method, stream) | |
92 | try: |
|
92 | try: | |
93 | response = requests.request( |
|
93 | response = requests.request( | |
94 | req_method, path_info, |
|
94 | req_method, path_info, | |
95 | data=data, headers=request_headers, stream=stream) |
|
95 | data=data, headers=request_headers, stream=stream) | |
96 | except requests.ConnectionError: |
|
96 | except requests.ConnectionError: | |
97 | log.exception('ConnectionError occurred for endpoint %s', path_info) |
|
97 | log.exception('ConnectionError occurred for endpoint %s', path_info) | |
98 | raise |
|
98 | raise | |
99 |
|
99 | |||
100 | if response.status_code not in [200, 401]: |
|
100 | if response.status_code not in [200, 401]: | |
101 | from rhodecode.lib.utils2 import safe_str |
|
101 | from rhodecode.lib.utils2 import safe_str | |
102 | text = '\n{}'.format(safe_str(response.text)) if response.text else '' |
|
102 | text = '\n{}'.format(safe_str(response.text)) if response.text else '' | |
103 | if response.status_code >= 500: |
|
103 | if response.status_code >= 500: | |
104 | log.error('Got SVN response:%s with text:`%s`', response, text) |
|
104 | log.error('Got SVN response:%s with text:`%s`', response, text) | |
105 | else: |
|
105 | else: | |
106 | log.debug('Got SVN response:%s with text:`%s`', response, text) |
|
106 | log.debug('Got SVN response:%s with text:`%s`', response, text) | |
107 | else: |
|
107 | else: | |
108 | log.debug('got response code: %s', response.status_code) |
|
108 | log.debug('got response code: %s', response.status_code) | |
109 |
|
109 | |||
110 | response_headers = self._get_response_headers(response.headers) |
|
110 | response_headers = self._get_response_headers(response.headers) | |
111 |
|
111 | |||
112 | if response.headers.get('SVN-Txn-name'): |
|
112 | if response.headers.get('SVN-Txn-name'): | |
113 | svn_tx_id = response.headers.get('SVN-Txn-name') |
|
113 | svn_tx_id = response.headers.get('SVN-Txn-name') | |
114 | txn_id = rc_cache.utils.compute_key_from_params( |
|
114 | txn_id = rc_cache.utils.compute_key_from_params( | |
115 | self.config['repository'], svn_tx_id) |
|
115 | self.config['repository'], svn_tx_id) | |
116 | port = safe_int(self.rc_extras['hooks_uri'].split(':')[-1]) |
|
116 | port = safe_int(self.rc_extras['hooks_uri'].split(':')[-1]) | |
117 | store_txn_id_data(txn_id, {'port': port}) |
|
117 | store_txn_id_data(txn_id, {'port': port}) | |
118 |
|
118 | |||
119 | start_response( |
|
119 | start_response( | |
120 | '{} {}'.format(response.status_code, response.reason), |
|
120 | '{} {}'.format(response.status_code, response.reason), | |
121 | response_headers) |
|
121 | response_headers) | |
122 | return response.iter_content(chunk_size=1024) |
|
122 | return response.iter_content(chunk_size=1024) | |
123 |
|
123 | |||
124 | def _get_url(self, svn_http_server, path): |
|
124 | def _get_url(self, svn_http_server, path): | |
125 | svn_http_server_url = (svn_http_server or '').rstrip('/') |
|
125 | svn_http_server_url = (svn_http_server or '').rstrip('/') | |
126 | url_path = urlparse.urljoin(svn_http_server_url + '/', (path or '').lstrip('/')) |
|
126 | url_path = urllib.parse.urljoin(svn_http_server_url + '/', (path or '').lstrip('/')) | |
127 | url_path = urllib.parse.quote(url_path, safe="/:=~+!$,;'") |
|
127 | url_path = urllib.parse.quote(url_path, safe="/:=~+!$,;'") | |
128 | return url_path |
|
128 | return url_path | |
129 |
|
129 | |||
130 | def _get_request_headers(self, environ): |
|
130 | def _get_request_headers(self, environ): | |
131 | headers = {} |
|
131 | headers = {} | |
132 |
|
132 | |||
133 | for key in environ: |
|
133 | for key in environ: | |
134 | if not key.startswith('HTTP_'): |
|
134 | if not key.startswith('HTTP_'): | |
135 | continue |
|
135 | continue | |
136 | new_key = key.split('_') |
|
136 | new_key = key.split('_') | |
137 | new_key = [k.capitalize() for k in new_key[1:]] |
|
137 | new_key = [k.capitalize() for k in new_key[1:]] | |
138 | new_key = '-'.join(new_key) |
|
138 | new_key = '-'.join(new_key) | |
139 | headers[new_key] = environ[key] |
|
139 | headers[new_key] = environ[key] | |
140 |
|
140 | |||
141 | if 'CONTENT_TYPE' in environ: |
|
141 | if 'CONTENT_TYPE' in environ: | |
142 | headers['Content-Type'] = environ['CONTENT_TYPE'] |
|
142 | headers['Content-Type'] = environ['CONTENT_TYPE'] | |
143 |
|
143 | |||
144 | if 'CONTENT_LENGTH' in environ: |
|
144 | if 'CONTENT_LENGTH' in environ: | |
145 | headers['Content-Length'] = environ['CONTENT_LENGTH'] |
|
145 | headers['Content-Length'] = environ['CONTENT_LENGTH'] | |
146 |
|
146 | |||
147 | return headers |
|
147 | return headers | |
148 |
|
148 | |||
149 | def _get_response_headers(self, headers): |
|
149 | def _get_response_headers(self, headers): | |
150 | headers = [ |
|
150 | headers = [ | |
151 | (h, headers[h]) |
|
151 | (h, headers[h]) | |
152 | for h in headers |
|
152 | for h in headers | |
153 | if h.lower() not in self.IGNORED_HEADERS |
|
153 | if h.lower() not in self.IGNORED_HEADERS | |
154 | ] |
|
154 | ] | |
155 |
|
155 | |||
156 | return headers |
|
156 | return headers | |
157 |
|
157 | |||
158 |
|
158 | |||
159 | class DisabledSimpleSvnApp(object): |
|
159 | class DisabledSimpleSvnApp(object): | |
160 | def __init__(self, config): |
|
160 | def __init__(self, config): | |
161 | self.config = config |
|
161 | self.config = config | |
162 |
|
162 | |||
163 | def __call__(self, environ, start_response): |
|
163 | def __call__(self, environ, start_response): | |
164 | reason = 'Cannot handle SVN call because: SVN HTTP Proxy is not enabled' |
|
164 | reason = 'Cannot handle SVN call because: SVN HTTP Proxy is not enabled' | |
165 | log.warning(reason) |
|
165 | log.warning(reason) | |
166 | return HTTPNotAcceptable(reason)(environ, start_response) |
|
166 | return HTTPNotAcceptable(reason)(environ, start_response) | |
167 |
|
167 | |||
168 |
|
168 | |||
169 | class SimpleSvn(simplevcs.SimpleVCS): |
|
169 | class SimpleSvn(simplevcs.SimpleVCS): | |
170 |
|
170 | |||
171 | SCM = 'svn' |
|
171 | SCM = 'svn' | |
172 | READ_ONLY_COMMANDS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT') |
|
172 | READ_ONLY_COMMANDS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT') | |
173 | DEFAULT_HTTP_SERVER = 'http://localhost:8090' |
|
173 | DEFAULT_HTTP_SERVER = 'http://localhost:8090' | |
174 |
|
174 | |||
175 | def _get_repository_name(self, environ): |
|
175 | def _get_repository_name(self, environ): | |
176 | """ |
|
176 | """ | |
177 | Gets repository name out of PATH_INFO header |
|
177 | Gets repository name out of PATH_INFO header | |
178 |
|
178 | |||
179 | :param environ: environ where PATH_INFO is stored |
|
179 | :param environ: environ where PATH_INFO is stored | |
180 | """ |
|
180 | """ | |
181 | path = environ['PATH_INFO'].split('!') |
|
181 | path = environ['PATH_INFO'].split('!') | |
182 | repo_name = path[0].strip('/') |
|
182 | repo_name = path[0].strip('/') | |
183 |
|
183 | |||
184 | # SVN includes the whole path in its requests, including |

184 | # SVN includes the whole path in its requests, including | |
185 | # subdirectories inside the repo. Therefore we have to search for |
|
185 | # subdirectories inside the repo. Therefore we have to search for | |
186 | # the repo root directory. |
|
186 | # the repo root directory. | |
187 | if not is_valid_repo( |
|
187 | if not is_valid_repo( | |
188 | repo_name, self.base_path, explicit_scm=self.SCM): |
|
188 | repo_name, self.base_path, explicit_scm=self.SCM): | |
189 | current_path = '' |
|
189 | current_path = '' | |
190 | for component in repo_name.split('/'): |
|
190 | for component in repo_name.split('/'): | |
191 | current_path += component |
|
191 | current_path += component | |
192 | if is_valid_repo( |
|
192 | if is_valid_repo( | |
193 | current_path, self.base_path, explicit_scm=self.SCM): |
|
193 | current_path, self.base_path, explicit_scm=self.SCM): | |
194 | return current_path |
|
194 | return current_path | |
195 | current_path += '/' |
|
195 | current_path += '/' | |
196 |
|
196 | |||
197 | return repo_name |
|
197 | return repo_name | |
198 |
|
198 | |||
199 | def _get_action(self, environ): |
|
199 | def _get_action(self, environ): | |
200 | return ( |
|
200 | return ( | |
201 | 'pull' |
|
201 | 'pull' | |
202 | if environ['REQUEST_METHOD'] in self.READ_ONLY_COMMANDS |
|
202 | if environ['REQUEST_METHOD'] in self.READ_ONLY_COMMANDS | |
203 | else 'push') |
|
203 | else 'push') | |
204 |
|
204 | |||
205 | def _should_use_callback_daemon(self, extras, environ, action): |
|
205 | def _should_use_callback_daemon(self, extras, environ, action): | |
206 | # only MERGE command triggers hooks, so we don't want to start |
|
206 | # only MERGE command triggers hooks, so we don't want to start | |
207 | # hooks server too many times. POST however starts the svn transaction |
|
207 | # hooks server too many times. POST however starts the svn transaction | |
208 | # so we also need to run the init of the callback daemon on POST |

208 | # so we also need to run the init of the callback daemon on POST | |
209 | if environ['REQUEST_METHOD'] in ['MERGE', 'POST']: |
|
209 | if environ['REQUEST_METHOD'] in ['MERGE', 'POST']: | |
210 | return True |
|
210 | return True | |
211 | return False |
|
211 | return False | |
212 |
|
212 | |||
213 | def _create_wsgi_app(self, repo_path, repo_name, config): |
|
213 | def _create_wsgi_app(self, repo_path, repo_name, config): | |
214 | if self._is_svn_enabled(): |
|
214 | if self._is_svn_enabled(): | |
215 | return SimpleSvnApp(config) |
|
215 | return SimpleSvnApp(config) | |
216 | # we don't have http proxy enabled return dummy request handler |
|
216 | # we don't have http proxy enabled return dummy request handler | |
217 | return DisabledSimpleSvnApp(config) |
|
217 | return DisabledSimpleSvnApp(config) | |
218 |
|
218 | |||
219 | def _is_svn_enabled(self): |
|
219 | def _is_svn_enabled(self): | |
220 | conf = self.repo_vcs_config |
|
220 | conf = self.repo_vcs_config | |
221 | return str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled')) |
|
221 | return str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled')) | |
222 |
|
222 | |||
223 | def _create_config(self, extras, repo_name, scheme='http'): |
|
223 | def _create_config(self, extras, repo_name, scheme='http'): | |
224 | conf = self.repo_vcs_config |
|
224 | conf = self.repo_vcs_config | |
225 | server_url = conf.get('vcs_svn_proxy', 'http_server_url') |
|
225 | server_url = conf.get('vcs_svn_proxy', 'http_server_url') | |
226 | server_url = server_url or self.DEFAULT_HTTP_SERVER |
|
226 | server_url = server_url or self.DEFAULT_HTTP_SERVER | |
227 |
|
227 | |||
228 | extras['subversion_http_server_url'] = server_url |
|
228 | extras['subversion_http_server_url'] = server_url | |
229 | return extras |
|
229 | return extras |
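A hedged usage sketch of the URL building done by SimpleSvnApp._get_url above, relying only on urllib.parse; the server URL and request path are assumed sample values:

import urllib.parse

def build_svn_proxy_url(svn_http_server, path):
    # join the proxy base URL with the request path, then percent-encode
    # while keeping the characters SVN uses in its magic paths
    base = (svn_http_server or '').rstrip('/')
    url_path = urllib.parse.urljoin(base + '/', (path or '').lstrip('/'))
    return urllib.parse.quote(url_path, safe="/:=~+!$,;'")

# assumed sample values
print(build_svn_proxy_url('http://localhost:8090', '/my-repo/!svn/vcc/default'))
# -> http://localhost:8090/my-repo/!svn/vcc/default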
@@ -1,189 +1,189 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2014-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2014-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Implementation of the scm_app interface using raw HTTP communication. |
|
22 | Implementation of the scm_app interface using raw HTTP communication. | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import base64 |
|
25 | import base64 | |
26 | import logging |
|
26 | import logging | |
27 | import urlparse |
|
27 | import urllib.parse | |
28 | import wsgiref.util |
|
28 | import wsgiref.util | |
29 |
|
29 | |||
30 | import msgpack |
|
30 | import msgpack | |
31 | import requests |
|
31 | import requests | |
32 | import webob.request |
|
32 | import webob.request | |
33 |
|
33 | |||
34 | import rhodecode |
|
34 | import rhodecode | |
35 |
|
35 | |||
36 |
|
36 | |||
37 | log = logging.getLogger(__name__) |
|
37 | log = logging.getLogger(__name__) | |
38 |
|
38 | |||
39 |
|
39 | |||
40 | def create_git_wsgi_app(repo_path, repo_name, config): |
|
40 | def create_git_wsgi_app(repo_path, repo_name, config): | |
41 | url = _vcs_streaming_url() + 'git/' |
|
41 | url = _vcs_streaming_url() + 'git/' | |
42 | return VcsHttpProxy(url, repo_path, repo_name, config) |
|
42 | return VcsHttpProxy(url, repo_path, repo_name, config) | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | def create_hg_wsgi_app(repo_path, repo_name, config): |
|
45 | def create_hg_wsgi_app(repo_path, repo_name, config): | |
46 | url = _vcs_streaming_url() + 'hg/' |
|
46 | url = _vcs_streaming_url() + 'hg/' | |
47 | return VcsHttpProxy(url, repo_path, repo_name, config) |
|
47 | return VcsHttpProxy(url, repo_path, repo_name, config) | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | def _vcs_streaming_url(): |
|
50 | def _vcs_streaming_url(): | |
51 | template = 'http://{}/stream/' |
|
51 | template = 'http://{}/stream/' | |
52 | return template.format(rhodecode.CONFIG['vcs.server']) |
|
52 | return template.format(rhodecode.CONFIG['vcs.server']) | |
53 |
|
53 | |||
54 |
|
54 | |||
55 | # TODO: johbo: Avoid the global. |
|
55 | # TODO: johbo: Avoid the global. | |
56 | session = requests.Session() |
|
56 | session = requests.Session() | |
57 | # Requests speedup, avoid reading .netrc and similar |
|
57 | # Requests speedup, avoid reading .netrc and similar | |
58 | session.trust_env = False |
|
58 | session.trust_env = False | |
59 |
|
59 | |||
60 | # prevent urllib3 spawning our logs. |
|
60 | # prevent urllib3 spawning our logs. | |
61 | logging.getLogger("requests.packages.urllib3.connectionpool").setLevel( |
|
61 | logging.getLogger("requests.packages.urllib3.connectionpool").setLevel( | |
62 | logging.WARNING) |
|
62 | logging.WARNING) | |
63 |
|
63 | |||
64 |
|
64 | |||
65 | class VcsHttpProxy(object): |
|
65 | class VcsHttpProxy(object): | |
66 | """ |
|
66 | """ | |
67 | A WSGI application which proxies vcs requests. |
|
67 | A WSGI application which proxies vcs requests. | |
68 |
|
68 | |||
69 | The goal is to shuffle the data around without touching it. The only |
|
69 | The goal is to shuffle the data around without touching it. The only | |
70 | exception is the extra data from the config object which we send to the |
|
70 | exception is the extra data from the config object which we send to the | |
71 | server as well. |
|
71 | server as well. | |
72 | """ |
|
72 | """ | |
73 |
|
73 | |||
74 | def __init__(self, url, repo_path, repo_name, config): |
|
74 | def __init__(self, url, repo_path, repo_name, config): | |
75 | """ |
|
75 | """ | |
76 | :param str url: The URL of the VCSServer to call. |
|
76 | :param str url: The URL of the VCSServer to call. | |
77 | """ |
|
77 | """ | |
78 | self._url = url |
|
78 | self._url = url | |
79 | self._repo_name = repo_name |
|
79 | self._repo_name = repo_name | |
80 | self._repo_path = repo_path |
|
80 | self._repo_path = repo_path | |
81 | self._config = config |
|
81 | self._config = config | |
82 | self.rc_extras = {} |
|
82 | self.rc_extras = {} | |
83 | log.debug( |
|
83 | log.debug( | |
84 | "Creating VcsHttpProxy for repo %s, url %s", |
|
84 | "Creating VcsHttpProxy for repo %s, url %s", | |
85 | repo_name, url) |
|
85 | repo_name, url) | |
86 |
|
86 | |||
87 | def __call__(self, environ, start_response): |
|
87 | def __call__(self, environ, start_response): | |
88 | config = msgpack.packb(self._config) |
|
88 | config = msgpack.packb(self._config) | |
89 | request = webob.request.Request(environ) |
|
89 | request = webob.request.Request(environ) | |
90 | request_headers = request.headers |
|
90 | request_headers = request.headers | |
91 |
|
91 | |||
92 | request_headers.update({ |
|
92 | request_headers.update({ | |
93 | # TODO: johbo: Remove this, rely on URL path only |
|
93 | # TODO: johbo: Remove this, rely on URL path only | |
94 | 'X-RC-Repo-Name': self._repo_name, |
|
94 | 'X-RC-Repo-Name': self._repo_name, | |
95 | 'X-RC-Repo-Path': self._repo_path, |
|
95 | 'X-RC-Repo-Path': self._repo_path, | |
96 | 'X-RC-Path-Info': environ['PATH_INFO'], |
|
96 | 'X-RC-Path-Info': environ['PATH_INFO'], | |
97 |
|
97 | |||
98 | 'X-RC-Repo-Store': self.rc_extras.get('repo_store'), |
|
98 | 'X-RC-Repo-Store': self.rc_extras.get('repo_store'), | |
99 | 'X-RC-Server-Config-File': self.rc_extras.get('config'), |
|
99 | 'X-RC-Server-Config-File': self.rc_extras.get('config'), | |
100 |
|
100 | |||
101 | 'X-RC-Auth-User': self.rc_extras.get('username'), |
|
101 | 'X-RC-Auth-User': self.rc_extras.get('username'), | |
102 | 'X-RC-Auth-User-Id': str(self.rc_extras.get('user_id')), |
|
102 | 'X-RC-Auth-User-Id': str(self.rc_extras.get('user_id')), | |
103 | 'X-RC-Auth-User-Ip': self.rc_extras.get('ip'), |
|
103 | 'X-RC-Auth-User-Ip': self.rc_extras.get('ip'), | |
104 |
|
104 | |||
105 | # TODO: johbo: Avoid encoding and put this into payload? |
|
105 | # TODO: johbo: Avoid encoding and put this into payload? | |
106 | 'X-RC-Repo-Config': base64.b64encode(config), |
|
106 | 'X-RC-Repo-Config': base64.b64encode(config), | |
107 | 'X-RC-Locked-Status-Code': rhodecode.CONFIG.get('lock_ret_code'), |
|
107 | 'X-RC-Locked-Status-Code': rhodecode.CONFIG.get('lock_ret_code'), | |
108 | }) |
|
108 | }) | |
109 |
|
109 | |||
110 | method = environ['REQUEST_METHOD'] |
|
110 | method = environ['REQUEST_METHOD'] | |
111 |
|
111 | |||
112 | # Preserve the query string |
|
112 | # Preserve the query string | |
113 | url = self._url |
|
113 | url = self._url | |
114 | url = urlparse.urljoin(url, self._repo_name) |
|
114 | url = urllib.parse.urljoin(url, self._repo_name) | |
115 | if environ.get('QUERY_STRING'): |
|
115 | if environ.get('QUERY_STRING'): | |
116 | url += '?' + environ['QUERY_STRING'] |
|
116 | url += '?' + environ['QUERY_STRING'] | |
117 |
|
117 | |||
118 | log.debug('http-app: preparing request to: %s', url) |
|
118 | log.debug('http-app: preparing request to: %s', url) | |
119 | response = session.request( |
|
119 | response = session.request( | |
120 | method, |
|
120 | method, | |
121 | url, |
|
121 | url, | |
122 | data=_maybe_stream_request(environ), |
|
122 | data=_maybe_stream_request(environ), | |
123 | headers=request_headers, |
|
123 | headers=request_headers, | |
124 | stream=True) |
|
124 | stream=True) | |
125 |
|
125 | |||
126 | log.debug('http-app: got vcsserver response: %s', response) |
|
126 | log.debug('http-app: got vcsserver response: %s', response) | |
127 | if response.status_code >= 500: |
|
127 | if response.status_code >= 500: | |
128 | log.error('Exception returned by vcsserver at: %s %s, %s', |
|
128 | log.error('Exception returned by vcsserver at: %s %s, %s', | |
129 | url, response.status_code, response.content) |
|
129 | url, response.status_code, response.content) | |
130 |
|
130 | |||
131 | # Preserve the headers of the response, except hop_by_hop ones |
|
131 | # Preserve the headers of the response, except hop_by_hop ones | |
132 | response_headers = [ |
|
132 | response_headers = [ | |
133 | (h, v) for h, v in response.headers.items() |
|
133 | (h, v) for h, v in response.headers.items() | |
134 | if not wsgiref.util.is_hop_by_hop(h) |
|
134 | if not wsgiref.util.is_hop_by_hop(h) | |
135 | ] |
|
135 | ] | |
136 |
|
136 | |||
137 | # Build status argument for start_response callable. |
|
137 | # Build status argument for start_response callable. | |
138 | status = '{status_code} {reason_phrase}'.format( |
|
138 | status = '{status_code} {reason_phrase}'.format( | |
139 | status_code=response.status_code, |
|
139 | status_code=response.status_code, | |
140 | reason_phrase=response.reason) |
|
140 | reason_phrase=response.reason) | |
141 |
|
141 | |||
142 | start_response(status, response_headers) |
|
142 | start_response(status, response_headers) | |
143 | return _maybe_stream_response(response) |
|
143 | return _maybe_stream_response(response) | |
144 |
|
144 | |||
145 |
|
145 | |||
146 | def read_in_chunks(stream_obj, block_size=1024, chunks=-1): |
|
146 | def read_in_chunks(stream_obj, block_size=1024, chunks=-1): | |
147 | """ |
|
147 | """ | |
148 | Read Stream in chunks, default chunk size: 1k. |
|
148 | Read Stream in chunks, default chunk size: 1k. | |
149 | """ |
|
149 | """ | |
150 | while chunks: |
|
150 | while chunks: | |
151 | data = stream_obj.read(block_size) |
|
151 | data = stream_obj.read(block_size) | |
152 | if not data: |
|
152 | if not data: | |
153 | break |
|
153 | break | |
154 | yield data |
|
154 | yield data | |
155 | chunks -= 1 |
|
155 | chunks -= 1 | |
156 |
|
156 | |||
157 |
|
157 | |||
158 | def _is_request_chunked(environ): |
|
158 | def _is_request_chunked(environ): | |
159 | stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked' |
|
159 | stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked' | |
160 | return stream |
|
160 | return stream | |
161 |
|
161 | |||
162 |
|
162 | |||
163 | def _maybe_stream_request(environ): |
|
163 | def _maybe_stream_request(environ): | |
164 | path = environ['PATH_INFO'] |
|
164 | path = environ['PATH_INFO'] | |
165 | stream = _is_request_chunked(environ) |
|
165 | stream = _is_request_chunked(environ) | |
166 | log.debug('handling request `%s` with stream support: %s', path, stream) |
|
166 | log.debug('handling request `%s` with stream support: %s', path, stream) | |
167 |
|
167 | |||
168 | if stream: |
|
168 | if stream: | |
169 | # set stream by 256k |
|
169 | # set stream by 256k | |
170 | return read_in_chunks(environ['wsgi.input'], block_size=1024 * 256) |
|
170 | return read_in_chunks(environ['wsgi.input'], block_size=1024 * 256) | |
171 | else: |
|
171 | else: | |
172 | return environ['wsgi.input'].read() |
|
172 | return environ['wsgi.input'].read() | |
173 |
|
173 | |||
174 |
|
174 | |||
175 | def _maybe_stream_response(response): |
|
175 | def _maybe_stream_response(response): | |
176 | """ |
|
176 | """ | |
177 | Try to generate chunks from the response if it is chunked. |
|
177 | Try to generate chunks from the response if it is chunked. | |
178 | """ |
|
178 | """ | |
179 | stream = _is_chunked(response) |
|
179 | stream = _is_chunked(response) | |
180 | log.debug('returning response with stream: %s', stream) |
|
180 | log.debug('returning response with stream: %s', stream) | |
181 | if stream: |
|
181 | if stream: | |
182 | # read in 256k Chunks |
|
182 | # read in 256k Chunks | |
183 | return response.raw.read_chunked(amt=1024 * 256) |
|
183 | return response.raw.read_chunked(amt=1024 * 256) | |
184 | else: |
|
184 | else: | |
185 | return [response.content] |
|
185 | return [response.content] | |
186 |
|
186 | |||
187 |
|
187 | |||
188 | def _is_chunked(response): |
|
188 | def _is_chunked(response): | |
189 | return response.headers.get('Transfer-Encoding', '') == 'chunked' |
|
189 | return response.headers.get('Transfer-Encoding', '') == 'chunked' |
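The proxy above streams request bodies with the read_in_chunks generator; a small self-contained usage sketch, with an in-memory BytesIO standing in for environ['wsgi.input']:

import io

def read_in_chunks(stream_obj, block_size=1024, chunks=-1):
    # same generator as above: yield fixed-size blocks until EOF
    while chunks:
        data = stream_obj.read(block_size)
        if not data:
            break
        yield data
        chunks -= 1

body = io.BytesIO(b'x' * (3 * 1024 + 10))  # assumed sample payload
print([len(c) for c in read_in_chunks(body, block_size=1024)])
# -> [1024, 1024, 1024, 10]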
@@ -1,284 +1,284 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import gzip |
|
21 | import gzip | |
22 | import shutil |
|
22 | import shutil | |
23 | import logging |
|
23 | import logging | |
24 | import tempfile |
|
24 | import tempfile | |
25 | import urlparse |
|
25 | import urllib.parse | |
26 |
|
26 | |||
27 | from webob.exc import HTTPNotFound |
|
27 | from webob.exc import HTTPNotFound | |
28 |
|
28 | |||
29 | import rhodecode |
|
29 | import rhodecode | |
30 | from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled |
|
30 | from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled | |
31 | from rhodecode.lib.middleware.simplegit import SimpleGit, GIT_PROTO_PAT |
|
31 | from rhodecode.lib.middleware.simplegit import SimpleGit, GIT_PROTO_PAT | |
32 | from rhodecode.lib.middleware.simplehg import SimpleHg |
|
32 | from rhodecode.lib.middleware.simplehg import SimpleHg | |
33 | from rhodecode.lib.middleware.simplesvn import SimpleSvn |
|
33 | from rhodecode.lib.middleware.simplesvn import SimpleSvn | |
34 | from rhodecode.model.settings import VcsSettingsModel |
|
34 | from rhodecode.model.settings import VcsSettingsModel | |
35 |
|
35 | |||
36 | log = logging.getLogger(__name__) |
|
36 | log = logging.getLogger(__name__) | |
37 |
|
37 | |||
38 | VCS_TYPE_KEY = '_rc_vcs_type' |
|
38 | VCS_TYPE_KEY = '_rc_vcs_type' | |
39 | VCS_TYPE_SKIP = '_rc_vcs_skip' |
|
39 | VCS_TYPE_SKIP = '_rc_vcs_skip' | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | def is_git(environ): |
|
42 | def is_git(environ): | |
43 | """ |
|
43 | """ | |
44 | Returns True if requests should be handled by GIT wsgi middleware |
|
44 | Returns True if requests should be handled by GIT wsgi middleware | |
45 | """ |
|
45 | """ | |
46 | is_git_path = GIT_PROTO_PAT.match(environ['PATH_INFO']) |
|
46 | is_git_path = GIT_PROTO_PAT.match(environ['PATH_INFO']) | |
47 | log.debug( |
|
47 | log.debug( | |
48 | 'request path: `%s` detected as GIT PROTOCOL %s', environ['PATH_INFO'], |
|
48 | 'request path: `%s` detected as GIT PROTOCOL %s', environ['PATH_INFO'], | |
49 | is_git_path is not None) |
|
49 | is_git_path is not None) | |
50 |
|
50 | |||
51 | return is_git_path |
|
51 | return is_git_path | |
52 |
|
52 | |||
53 |
|
53 | |||
54 | def is_hg(environ): |
|
54 | def is_hg(environ): | |
55 | """ |
|
55 | """ | |
56 | Returns True if requests target is mercurial server - header |
|
56 | Returns True if requests target is mercurial server - header | |
57 | ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``. |
|
57 | ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``. | |
58 | """ |
|
58 | """ | |
59 | is_hg_path = False |
|
59 | is_hg_path = False | |
60 |
|
60 | |||
61 | http_accept = environ.get('HTTP_ACCEPT') |
|
61 | http_accept = environ.get('HTTP_ACCEPT') | |
62 |
|
62 | |||
63 | if http_accept and http_accept.startswith('application/mercurial'): |
|
63 | if http_accept and http_accept.startswith('application/mercurial'): | |
64 | query = urlparse.parse_qs(environ['QUERY_STRING']) |
|
64 | query = urllib.parse.parse_qs(environ['QUERY_STRING']) | |
65 | if 'cmd' in query: |
|
65 | if 'cmd' in query: | |
66 | is_hg_path = True |
|
66 | is_hg_path = True | |
67 |
|
67 | |||
68 | log.debug( |
|
68 | log.debug( | |
69 | 'request path: `%s` detected as HG PROTOCOL %s', environ['PATH_INFO'], |
|
69 | 'request path: `%s` detected as HG PROTOCOL %s', environ['PATH_INFO'], | |
70 | is_hg_path) |
|
70 | is_hg_path) | |
71 |
|
71 | |||
72 | return is_hg_path |
|
72 | return is_hg_path | |
73 |
|
73 | |||
74 |
|
74 | |||
75 | def is_svn(environ): |
|
75 | def is_svn(environ): | |
76 | """ |
|
76 | """ | |
77 | Returns True if requests target is Subversion server |
|
77 | Returns True if requests target is Subversion server | |
78 | """ |
|
78 | """ | |
79 |
|
79 | |||
80 | http_dav = environ.get('HTTP_DAV', '') |
|
80 | http_dav = environ.get('HTTP_DAV', '') | |
81 | magic_path_segment = rhodecode.CONFIG.get( |
|
81 | magic_path_segment = rhodecode.CONFIG.get( | |
82 | 'rhodecode_subversion_magic_path', '/!svn') |
|
82 | 'rhodecode_subversion_magic_path', '/!svn') | |
83 | is_svn_path = ( |
|
83 | is_svn_path = ( | |
84 | 'subversion' in http_dav or |
|
84 | 'subversion' in http_dav or | |
85 | magic_path_segment in environ['PATH_INFO'] |
|
85 | magic_path_segment in environ['PATH_INFO'] | |
86 | or environ['REQUEST_METHOD'] in ['PROPFIND', 'PROPPATCH'] |
|
86 | or environ['REQUEST_METHOD'] in ['PROPFIND', 'PROPPATCH'] | |
87 | ) |
|
87 | ) | |
88 | log.debug( |
|
88 | log.debug( | |
89 | 'request path: `%s` detected as SVN PROTOCOL %s', environ['PATH_INFO'], |
|
89 | 'request path: `%s` detected as SVN PROTOCOL %s', environ['PATH_INFO'], | |
90 | is_svn_path) |
|
90 | is_svn_path) | |
91 |
|
91 | |||
92 | return is_svn_path |
|
92 | return is_svn_path | |
93 |
|
93 | |||
94 |
|
94 | |||
95 | class GunzipMiddleware(object): |
|
95 | class GunzipMiddleware(object): | |
96 | """ |
|
96 | """ | |
97 | WSGI middleware that unzips gzip-encoded requests before |
|
97 | WSGI middleware that unzips gzip-encoded requests before | |
98 | passing on to the underlying application. |
|
98 | passing on to the underlying application. | |
99 | """ |
|
99 | """ | |
100 |
|
100 | |||
101 | def __init__(self, application): |
|
101 | def __init__(self, application): | |
102 | self.app = application |
|
102 | self.app = application | |
103 |
|
103 | |||
104 | def __call__(self, environ, start_response): |
|
104 | def __call__(self, environ, start_response): | |
105 | accepts_encoding_header = environ.get('HTTP_CONTENT_ENCODING', b'') |
|
105 | accepts_encoding_header = environ.get('HTTP_CONTENT_ENCODING', b'') | |
106 |
|
106 | |||
107 | if b'gzip' in accepts_encoding_header: |
|
107 | if b'gzip' in accepts_encoding_header: | |
108 | log.debug('gzip detected, now running gunzip wrapper') |
|
108 | log.debug('gzip detected, now running gunzip wrapper') | |
109 | wsgi_input = environ['wsgi.input'] |
|
109 | wsgi_input = environ['wsgi.input'] | |
110 |
|
110 | |||
111 | if not hasattr(environ['wsgi.input'], 'seek'): |
|
111 | if not hasattr(environ['wsgi.input'], 'seek'): | |
112 | # The gzip implementation in the standard library of Python 2.x |
|
112 | # The gzip implementation in the standard library of Python 2.x | |
113 | # requires the '.seek()' and '.tell()' methods to be available |
|
113 | # requires the '.seek()' and '.tell()' methods to be available | |
114 | # on the input stream. Read the data into a temporary file to |
|
114 | # on the input stream. Read the data into a temporary file to | |
115 | # work around this limitation. |
|
115 | # work around this limitation. | |
116 |
|
116 | |||
117 | wsgi_input = tempfile.SpooledTemporaryFile(64 * 1024 * 1024) |
|
117 | wsgi_input = tempfile.SpooledTemporaryFile(64 * 1024 * 1024) | |
118 | shutil.copyfileobj(environ['wsgi.input'], wsgi_input) |
|
118 | shutil.copyfileobj(environ['wsgi.input'], wsgi_input) | |
119 | wsgi_input.seek(0) |
|
119 | wsgi_input.seek(0) | |
120 |
|
120 | |||
121 | environ['wsgi.input'] = gzip.GzipFile(fileobj=wsgi_input, mode='r') |
|
121 | environ['wsgi.input'] = gzip.GzipFile(fileobj=wsgi_input, mode='r') | |
122 | # since we "Ungzipped" the content we say now it's no longer gzip |
|
122 | # since we "Ungzipped" the content we say now it's no longer gzip | |
123 | # content encoding |
|
123 | # content encoding | |
124 | del environ['HTTP_CONTENT_ENCODING'] |
|
124 | del environ['HTTP_CONTENT_ENCODING'] | |
125 |
|
125 | |||
126 | # the content length has changed after decompression, so drop it |

126 | # the content length has changed after decompression, so drop it | |
127 | if 'CONTENT_LENGTH' in environ: |
|
127 | if 'CONTENT_LENGTH' in environ: | |
128 | del environ['CONTENT_LENGTH'] |
|
128 | del environ['CONTENT_LENGTH'] | |
129 | else: |
|
129 | else: | |
130 | log.debug('content not gzipped, gzipMiddleware passing ' |
|
130 | log.debug('content not gzipped, gzipMiddleware passing ' | |
131 | 'request further') |
|
131 | 'request further') | |
132 | return self.app(environ, start_response) |
|
132 | return self.app(environ, start_response) | |
133 |
|
133 | |||
134 |
|
134 | |||
135 | def is_vcs_call(environ): |
|
135 | def is_vcs_call(environ): | |
136 | if VCS_TYPE_KEY in environ: |
|
136 | if VCS_TYPE_KEY in environ: | |
137 | raw_type = environ[VCS_TYPE_KEY] |
|
137 | raw_type = environ[VCS_TYPE_KEY] | |
138 | return raw_type and raw_type != VCS_TYPE_SKIP |
|
138 | return raw_type and raw_type != VCS_TYPE_SKIP | |
139 | return False |
|
139 | return False | |
140 |
|
140 | |||
141 |
|
141 | |||
142 | def get_path_elem(route_path): |
|
142 | def get_path_elem(route_path): | |
143 | if not route_path: |
|
143 | if not route_path: | |
144 | return None |
|
144 | return None | |
145 |
|
145 | |||
146 | cleaned_route_path = route_path.lstrip('/') |
|
146 | cleaned_route_path = route_path.lstrip('/') | |
147 | if cleaned_route_path: |
|
147 | if cleaned_route_path: | |
148 | cleaned_route_path_elems = cleaned_route_path.split('/') |
|
148 | cleaned_route_path_elems = cleaned_route_path.split('/') | |
149 | if cleaned_route_path_elems: |
|
149 | if cleaned_route_path_elems: | |
150 | return cleaned_route_path_elems[0] |
|
150 | return cleaned_route_path_elems[0] | |
151 | return None |
|
151 | return None | |
152 |
|
152 | |||
153 |
|
153 | |||
154 | def detect_vcs_request(environ, backends): |
|
154 | def detect_vcs_request(environ, backends): | |
155 | checks = { |
|
155 | checks = { | |
156 | 'hg': (is_hg, SimpleHg), |
|
156 | 'hg': (is_hg, SimpleHg), | |
157 | 'git': (is_git, SimpleGit), |
|
157 | 'git': (is_git, SimpleGit), | |
158 | 'svn': (is_svn, SimpleSvn), |
|
158 | 'svn': (is_svn, SimpleSvn), | |
159 | } |
|
159 | } | |
160 | handler = None |
|
160 | handler = None | |
161 | # list of path prefixes (first URL segment) for which we skip VCS checks |

161 | # list of path prefixes (first URL segment) for which we skip VCS checks | |
162 | white_list = [ |
|
162 | white_list = [ | |
163 | # e.g /_file_store/download |
|
163 | # e.g /_file_store/download | |
164 | '_file_store', |
|
164 | '_file_store', | |
165 |
|
165 | |||
166 | # static files no detection |
|
166 | # static files no detection | |
167 | '_static', |
|
167 | '_static', | |
168 |
|
168 | |||
169 | # skip ops ping, status |
|
169 | # skip ops ping, status | |
170 | '_admin/ops/ping', |
|
170 | '_admin/ops/ping', | |
171 | '_admin/ops/status', |
|
171 | '_admin/ops/status', | |
172 |
|
172 | |||
173 | # full channelstream connect should be VCS skipped |
|
173 | # full channelstream connect should be VCS skipped | |
174 | '_admin/channelstream/connect', |
|
174 | '_admin/channelstream/connect', | |
175 | ] |
|
175 | ] | |
176 |
|
176 | |||
177 | path_info = environ['PATH_INFO'] |
|
177 | path_info = environ['PATH_INFO'] | |
178 |
|
178 | |||
179 | path_elem = get_path_elem(path_info) |
|
179 | path_elem = get_path_elem(path_info) | |
180 |
|
180 | |||
181 | if path_elem in white_list: |
|
181 | if path_elem in white_list: | |
182 | log.debug('path `%s` in whitelist, skipping...', path_info) |
|
182 | log.debug('path `%s` in whitelist, skipping...', path_info) | |
183 | return handler |
|
183 | return handler | |
184 |
|
184 | |||
185 | path_url = path_info.lstrip('/') |
|
185 | path_url = path_info.lstrip('/') | |
186 | if path_url in white_list: |
|
186 | if path_url in white_list: | |
187 | log.debug('full url path `%s` in whitelist, skipping...', path_url) |
|
187 | log.debug('full url path `%s` in whitelist, skipping...', path_url) | |
188 | return handler |
|
188 | return handler | |
189 |
|
189 | |||
190 | if VCS_TYPE_KEY in environ: |
|
190 | if VCS_TYPE_KEY in environ: | |
191 | raw_type = environ[VCS_TYPE_KEY] |
|
191 | raw_type = environ[VCS_TYPE_KEY] | |
192 | if raw_type == VCS_TYPE_SKIP: |
|
192 | if raw_type == VCS_TYPE_SKIP: | |
193 | log.debug('got `skip` marker for vcs detection, skipping...') |
|
193 | log.debug('got `skip` marker for vcs detection, skipping...') | |
194 | return handler |
|
194 | return handler | |
195 |
|
195 | |||
196 | _check, handler = checks.get(raw_type) or [None, None] |
|
196 | _check, handler = checks.get(raw_type) or [None, None] | |
197 | if handler: |
|
197 | if handler: | |
198 | log.debug('got handler:%s from environ', handler) |
|
198 | log.debug('got handler:%s from environ', handler) | |
199 |
|
199 | |||
200 | if not handler: |
|
200 | if not handler: | |
201 | log.debug('request start: checking if request for `%s` is of VCS type in order: %s', path_elem, backends) |
|
201 | log.debug('request start: checking if request for `%s` is of VCS type in order: %s', path_elem, backends) | |
202 | for vcs_type in backends: |
|
202 | for vcs_type in backends: | |
203 | vcs_check, _handler = checks[vcs_type] |
|
203 | vcs_check, _handler = checks[vcs_type] | |
204 | if vcs_check(environ): |
|
204 | if vcs_check(environ): | |
205 | log.debug('vcs handler found %s', _handler) |
|
205 | log.debug('vcs handler found %s', _handler) | |
206 | handler = _handler |
|
206 | handler = _handler | |
207 | break |
|
207 | break | |
208 |
|
208 | |||
209 | return handler |
|
209 | return handler | |
210 |
|
210 | |||
211 |
|
211 | |||
212 | class VCSMiddleware(object): |
|
212 | class VCSMiddleware(object): | |
213 |
|
213 | |||
214 | def __init__(self, app, registry, config, appenlight_client): |
|
214 | def __init__(self, app, registry, config, appenlight_client): | |
215 | self.application = app |
|
215 | self.application = app | |
216 | self.registry = registry |
|
216 | self.registry = registry | |
217 | self.config = config |
|
217 | self.config = config | |
218 | self.appenlight_client = appenlight_client |
|
218 | self.appenlight_client = appenlight_client | |
219 | self.use_gzip = True |
|
219 | self.use_gzip = True | |
220 | # order in which we check the middlewares, based on vcs.backends config |
|
220 | # order in which we check the middlewares, based on vcs.backends config | |
221 | self.check_middlewares = config['vcs.backends'] |
|
221 | self.check_middlewares = config['vcs.backends'] | |
222 |
|
222 | |||
223 | def vcs_config(self, repo_name=None): |
|
223 | def vcs_config(self, repo_name=None): | |
224 | """ |
|
224 | """ | |
225 | returns serialized VcsSettings |
|
225 | returns serialized VcsSettings | |
226 | """ |
|
226 | """ | |
227 | try: |
|
227 | try: | |
228 | return VcsSettingsModel( |
|
228 | return VcsSettingsModel( | |
229 | repo=repo_name).get_ui_settings_as_config_obj() |
|
229 | repo=repo_name).get_ui_settings_as_config_obj() | |
230 | except Exception: |
|
230 | except Exception: | |
231 | pass |
|
231 | pass | |
232 |
|
232 | |||
233 | def wrap_in_gzip_if_enabled(self, app, config): |
|
233 | def wrap_in_gzip_if_enabled(self, app, config): | |
234 | if self.use_gzip: |
|
234 | if self.use_gzip: | |
235 | app = GunzipMiddleware(app) |
|
235 | app = GunzipMiddleware(app) | |
236 | return app |
|
236 | return app | |
237 |
|
237 | |||
238 | def _get_handler_app(self, environ): |
|
238 | def _get_handler_app(self, environ): | |
239 | app = None |
|
239 | app = None | |
240 | log.debug('VCSMiddleware: detecting vcs type.') |
|
240 | log.debug('VCSMiddleware: detecting vcs type.') | |
241 | handler = detect_vcs_request(environ, self.check_middlewares) |
|
241 | handler = detect_vcs_request(environ, self.check_middlewares) | |
242 | if handler: |
|
242 | if handler: | |
243 | app = handler(self.config, self.registry) |
|
243 | app = handler(self.config, self.registry) | |
244 |
|
244 | |||
245 | return app |
|
245 | return app | |
246 |
|
246 | |||
247 | def __call__(self, environ, start_response): |
|
247 | def __call__(self, environ, start_response): | |
248 | # check if we handle one of the interesting protocols, optionally extract |

248 | # check if we handle one of the interesting protocols, optionally extract | |
249 | # specific vcsSettings and allow changes of how things are wrapped |
|
249 | # specific vcsSettings and allow changes of how things are wrapped | |
250 | vcs_handler = self._get_handler_app(environ) |
|
250 | vcs_handler = self._get_handler_app(environ) | |
251 | if vcs_handler: |
|
251 | if vcs_handler: | |
252 | # translate the _REPO_ID into real repo NAME for usage |
|
252 | # translate the _REPO_ID into real repo NAME for usage | |
253 | # in middleware |
|
253 | # in middleware | |
254 | environ['PATH_INFO'] = vcs_handler._get_by_id(environ['PATH_INFO']) |
|
254 | environ['PATH_INFO'] = vcs_handler._get_by_id(environ['PATH_INFO']) | |
255 |
|
255 | |||
256 | # Set acl, url and vcs repo names. |
|
256 | # Set acl, url and vcs repo names. | |
257 | vcs_handler.set_repo_names(environ) |
|
257 | vcs_handler.set_repo_names(environ) | |
258 |
|
258 | |||
259 | # register repo config back to the handler |
|
259 | # register repo config back to the handler | |
260 | vcs_conf = self.vcs_config(vcs_handler.acl_repo_name) |
|
260 | vcs_conf = self.vcs_config(vcs_handler.acl_repo_name) | |
261 | # maybe damaged/non-existent settings. We still want to |

261 | # maybe damaged/non-existent settings. We still want to | |
262 | # reach the is_valid_and_existing_repo check below |

262 | # reach the is_valid_and_existing_repo check below | |
263 | # and return a proper HTTP code back to the client |

263 | # and return a proper HTTP code back to the client | |
264 | if vcs_conf: |
|
264 | if vcs_conf: | |
265 | vcs_handler.repo_vcs_config = vcs_conf |
|
265 | vcs_handler.repo_vcs_config = vcs_conf | |
266 |
|
266 | |||
267 | # check for type, presence in database and on filesystem |
|
267 | # check for type, presence in database and on filesystem | |
268 | if not vcs_handler.is_valid_and_existing_repo( |
|
268 | if not vcs_handler.is_valid_and_existing_repo( | |
269 | vcs_handler.acl_repo_name, |
|
269 | vcs_handler.acl_repo_name, | |
270 | vcs_handler.base_path, |
|
270 | vcs_handler.base_path, | |
271 | vcs_handler.SCM): |
|
271 | vcs_handler.SCM): | |
272 | return HTTPNotFound()(environ, start_response) |
|
272 | return HTTPNotFound()(environ, start_response) | |
273 |
|
273 | |||
274 | environ['REPO_NAME'] = vcs_handler.url_repo_name |
|
274 | environ['REPO_NAME'] = vcs_handler.url_repo_name | |
275 |
|
275 | |||
276 | # Wrap handler in middlewares if they are enabled. |
|
276 | # Wrap handler in middlewares if they are enabled. | |
277 | vcs_handler = self.wrap_in_gzip_if_enabled( |
|
277 | vcs_handler = self.wrap_in_gzip_if_enabled( | |
278 | vcs_handler, self.config) |
|
278 | vcs_handler, self.config) | |
279 | vcs_handler, _ = wrap_in_appenlight_if_enabled( |
|
279 | vcs_handler, _ = wrap_in_appenlight_if_enabled( | |
280 | vcs_handler, self.config, self.appenlight_client) |
|
280 | vcs_handler, self.config, self.appenlight_client) | |
281 |
|
281 | |||
282 | return vcs_handler(environ, start_response) |
|
282 | return vcs_handler(environ, start_response) | |
283 |
|
283 | |||
284 | return self.application(environ, start_response) |
|
284 | return self.application(environ, start_response) |
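A self-contained sketch of the Mercurial detection rule from is_hg() above; the environ values are assumed samples of what an hg client sends:

import urllib.parse

def looks_like_hg_request(environ):
    # hg clients send Accept: application/mercurial-... plus a ?cmd= query
    http_accept = environ.get('HTTP_ACCEPT', '')
    if not http_accept.startswith('application/mercurial'):
        return False
    query = urllib.parse.parse_qs(environ.get('QUERY_STRING', ''))
    return 'cmd' in query

sample_environ = {  # assumed sample values
    'HTTP_ACCEPT': 'application/mercurial-0.1',
    'QUERY_STRING': 'cmd=capabilities',
}
print(looks_like_hg_request(sample_environ))  # True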
@@ -1,412 +1,412 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Client for the VCSServer implemented based on HTTP. |
|
22 | Client for the VCSServer implemented based on HTTP. | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import copy |
|
25 | import copy | |
26 | import logging |
|
26 | import logging | |
27 | import threading |
|
27 | import threading | |
28 | import time |
|
28 | import time | |
29 | import urllib.request, urllib.error, urllib.parse |
|
29 | import urllib.request, urllib.error, urllib.parse | |
30 | import urlparse |
|
30 | import urllib.parse | |
31 | import uuid |
|
31 | import uuid | |
32 | import traceback |
|
32 | import traceback | |
33 |
|
33 | |||
34 | import pycurl |
|
34 | import pycurl | |
35 | import msgpack |
|
35 | import msgpack | |
36 | import requests |
|
36 | import requests | |
37 | from requests.packages.urllib3.util.retry import Retry |
|
37 | from requests.packages.urllib3.util.retry import Retry | |
38 |
|
38 | |||
39 | import rhodecode |
|
39 | import rhodecode | |
40 | from rhodecode.lib import rc_cache |
|
40 | from rhodecode.lib import rc_cache | |
41 | from rhodecode.lib.rc_cache.utils import compute_key_from_params |
|
41 | from rhodecode.lib.rc_cache.utils import compute_key_from_params | |
42 | from rhodecode.lib.system_info import get_cert_path |
|
42 | from rhodecode.lib.system_info import get_cert_path | |
43 | from rhodecode.lib.vcs import exceptions, CurlSession |
|
43 | from rhodecode.lib.vcs import exceptions, CurlSession | |
44 | from rhodecode.lib.utils2 import str2bool |
|
44 | from rhodecode.lib.utils2 import str2bool | |
45 |
|
45 | |||
46 | log = logging.getLogger(__name__) |
|
46 | log = logging.getLogger(__name__) | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | # TODO: mikhail: Keep it in sync with vcsserver's |
|
49 | # TODO: mikhail: Keep it in sync with vcsserver's | |
50 | # HTTPApplication.ALLOWED_EXCEPTIONS |
|
50 | # HTTPApplication.ALLOWED_EXCEPTIONS | |
51 | EXCEPTIONS_MAP = { |
|
51 | EXCEPTIONS_MAP = { | |
52 | 'KeyError': KeyError, |
|
52 | 'KeyError': KeyError, | |
53 | 'URLError': urllib.error.URLError, |
|
53 | 'URLError': urllib.error.URLError, | |
54 | } |
|
54 | } | |
55 |
|
55 | |||
56 |
|
56 | |||
57 | def _remote_call(url, payload, exceptions_map, session): |
|
57 | def _remote_call(url, payload, exceptions_map, session): | |
58 | try: |
|
58 | try: | |
59 | headers = { |
|
59 | headers = { | |
60 | 'X-RC-Method': payload.get('method'), |
|
60 | 'X-RC-Method': payload.get('method'), | |
61 | 'X-RC-Repo-Name': payload.get('_repo_name') |
|
61 | 'X-RC-Repo-Name': payload.get('_repo_name') | |
62 | } |
|
62 | } | |
63 | response = session.post(url, data=msgpack.packb(payload), headers=headers) |
|
63 | response = session.post(url, data=msgpack.packb(payload), headers=headers) | |
64 | except pycurl.error as e: |
|
64 | except pycurl.error as e: | |
65 | msg = '{}. \npycurl traceback: {}'.format(e, traceback.format_exc()) |
|
65 | msg = '{}. \npycurl traceback: {}'.format(e, traceback.format_exc()) | |
66 | raise exceptions.HttpVCSCommunicationError(msg) |
|
66 | raise exceptions.HttpVCSCommunicationError(msg) | |
67 | except Exception as e: |
|
67 | except Exception as e: | |
68 | message = getattr(e, 'message', '') |
|
68 | message = getattr(e, 'message', '') | |
69 | if 'Failed to connect' in message: |
|
69 | if 'Failed to connect' in message: | |
70 | # gevent doesn't return proper pycurl errors |
|
70 | # gevent doesn't return proper pycurl errors | |
71 | raise exceptions.HttpVCSCommunicationError(e) |
|
71 | raise exceptions.HttpVCSCommunicationError(e) | |
72 | else: |
|
72 | else: | |
73 | raise |
|
73 | raise | |
74 |
|
74 | |||
75 | if response.status_code >= 400: |
|
75 | if response.status_code >= 400: | |
76 | log.error('Call to %s returned non 200 HTTP code: %s', |
|
76 | log.error('Call to %s returned non 200 HTTP code: %s', | |
77 | url, response.status_code) |
|
77 | url, response.status_code) | |
78 | raise exceptions.HttpVCSCommunicationError(repr(response.content)) |
|
78 | raise exceptions.HttpVCSCommunicationError(repr(response.content)) | |
79 |
|
79 | |||
80 | try: |
|
80 | try: | |
81 | response = msgpack.unpackb(response.content) |
|
81 | response = msgpack.unpackb(response.content) | |
82 | except Exception: |
|
82 | except Exception: | |
83 | log.exception('Failed to decode response %r', response.content) |
|
83 | log.exception('Failed to decode response %r', response.content) | |
84 | raise |
|
84 | raise | |
85 |
|
85 | |||
86 | error = response.get('error') |
|
86 | error = response.get('error') | |
87 | if error: |
|
87 | if error: | |
88 | type_ = error.get('type', 'Exception') |
|
88 | type_ = error.get('type', 'Exception') | |
89 | exc = exceptions_map.get(type_, Exception) |
|
89 | exc = exceptions_map.get(type_, Exception) | |
90 | exc = exc(error.get('message')) |
|
90 | exc = exc(error.get('message')) | |
91 | try: |
|
91 | try: | |
92 | exc._vcs_kind = error['_vcs_kind'] |
|
92 | exc._vcs_kind = error['_vcs_kind'] | |
93 | except KeyError: |
|
93 | except KeyError: | |
94 | pass |
|
94 | pass | |
95 |
|
95 | |||
96 | try: |
|
96 | try: | |
97 | exc._vcs_server_traceback = error['traceback'] |
|
97 | exc._vcs_server_traceback = error['traceback'] | |
98 | exc._vcs_server_org_exc_name = error['org_exc'] |
|
98 | exc._vcs_server_org_exc_name = error['org_exc'] | |
99 | exc._vcs_server_org_exc_tb = error['org_exc_tb'] |
|
99 | exc._vcs_server_org_exc_tb = error['org_exc_tb'] | |
100 | except KeyError: |
|
100 | except KeyError: | |
101 | pass |
|
101 | pass | |
102 |
|
102 | |||
103 | raise exc |
|
103 | raise exc | |
104 | return response.get('result') |
|
104 | return response.get('result') | |
105 |
|
105 | |||
106 |
|
106 | |||
107 | def _streaming_remote_call(url, payload, exceptions_map, session, chunk_size): |
|
107 | def _streaming_remote_call(url, payload, exceptions_map, session, chunk_size): | |
108 | try: |
|
108 | try: | |
109 | headers = { |
|
109 | headers = { | |
110 | 'X-RC-Method': payload.get('method'), |
|
110 | 'X-RC-Method': payload.get('method'), | |
111 | 'X-RC-Repo-Name': payload.get('_repo_name') |
|
111 | 'X-RC-Repo-Name': payload.get('_repo_name') | |
112 | } |
|
112 | } | |
113 | response = session.post(url, data=msgpack.packb(payload), headers=headers) |
|
113 | response = session.post(url, data=msgpack.packb(payload), headers=headers) | |
114 | except pycurl.error as e: |
|
114 | except pycurl.error as e: | |
115 | msg = '{}. \npycurl traceback: {}'.format(e, traceback.format_exc()) |
|
115 | msg = '{}. \npycurl traceback: {}'.format(e, traceback.format_exc()) | |
116 | raise exceptions.HttpVCSCommunicationError(msg) |
|
116 | raise exceptions.HttpVCSCommunicationError(msg) | |
117 | except Exception as e: |
|
117 | except Exception as e: | |
118 | message = getattr(e, 'message', '') |
|
118 | message = getattr(e, 'message', '') | |
119 | if 'Failed to connect' in message: |
|
119 | if 'Failed to connect' in message: | |
120 | # gevent doesn't return proper pycurl errors |
|
120 | # gevent doesn't return proper pycurl errors | |
121 | raise exceptions.HttpVCSCommunicationError(e) |
|
121 | raise exceptions.HttpVCSCommunicationError(e) | |
122 | else: |
|
122 | else: | |
123 | raise |
|
123 | raise | |
124 |
|
124 | |||
125 | if response.status_code >= 400: |
|
125 | if response.status_code >= 400: | |
126 | log.error('Call to %s returned non 200 HTTP code: %s', |
|
126 | log.error('Call to %s returned non 200 HTTP code: %s', | |
127 | url, response.status_code) |
|
127 | url, response.status_code) | |
128 | raise exceptions.HttpVCSCommunicationError(repr(response.content)) |
|
128 | raise exceptions.HttpVCSCommunicationError(repr(response.content)) | |
129 |
|
129 | |||
130 | return response.iter_content(chunk_size=chunk_size) |
|
130 | return response.iter_content(chunk_size=chunk_size) | |
131 |
|
131 | |||
132 |
|
132 | |||
133 | class ServiceConnection(object): |
|
133 | class ServiceConnection(object): | |
134 | def __init__(self, server_and_port, backend_endpoint, session_factory): |
|
134 | def __init__(self, server_and_port, backend_endpoint, session_factory): | |
135 | self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint) |
|
135 | self.url = urllib.parse.urljoin('http://%s' % server_and_port, backend_endpoint) | 
136 | self._session_factory = session_factory |
|
136 | self._session_factory = session_factory | |
137 |
|
137 | |||
138 | def __getattr__(self, name): |
|
138 | def __getattr__(self, name): | |
139 | def f(*args, **kwargs): |
|
139 | def f(*args, **kwargs): | |
140 | return self._call(name, *args, **kwargs) |
|
140 | return self._call(name, *args, **kwargs) | |
141 | return f |
|
141 | return f | |
142 |
|
142 | |||
143 | @exceptions.map_vcs_exceptions |
|
143 | @exceptions.map_vcs_exceptions | |
144 | def _call(self, name, *args, **kwargs): |
|
144 | def _call(self, name, *args, **kwargs): | |
145 | payload = { |
|
145 | payload = { | |
146 | 'id': str(uuid.uuid4()), |
|
146 | 'id': str(uuid.uuid4()), | |
147 | 'method': name, |
|
147 | 'method': name, | |
148 | 'params': {'args': args, 'kwargs': kwargs} |
|
148 | 'params': {'args': args, 'kwargs': kwargs} | |
149 | } |
|
149 | } | |
150 | return _remote_call( |
|
150 | return _remote_call( | |
151 | self.url, payload, EXCEPTIONS_MAP, self._session_factory()) |
|
151 | self.url, payload, EXCEPTIONS_MAP, self._session_factory()) | |
152 |
|
152 | |||
153 |
|
153 | |||
154 | class RemoteVCSMaker(object): |
|
154 | class RemoteVCSMaker(object): | |
155 |
|
155 | |||
156 | def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory): |
|
156 | def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory): | |
157 | self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint) |
|
157 | self.url = urllib.parse.urljoin('http://%s' % server_and_port, backend_endpoint) | 
158 | self.stream_url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint+'/stream') |
|
158 | self.stream_url = urllib.parse.urljoin('http://%s' % server_and_port, backend_endpoint+'/stream') | 
159 |
|
159 | |||
160 | self._session_factory = session_factory |
|
160 | self._session_factory = session_factory | |
161 | self.backend_type = backend_type |
|
161 | self.backend_type = backend_type | |
162 |
|
162 | |||
163 | @classmethod |
|
163 | @classmethod | |
164 | def init_cache_region(cls, repo_id): |
|
164 | def init_cache_region(cls, repo_id): | |
165 | cache_namespace_uid = 'cache_repo.{}'.format(repo_id) |
|
165 | cache_namespace_uid = 'cache_repo.{}'.format(repo_id) | |
166 | region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) |
|
166 | region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) | |
167 | return region, cache_namespace_uid |
|
167 | return region, cache_namespace_uid | |
168 |
|
168 | |||
169 | def __call__(self, path, repo_id, config, with_wire=None): |
|
169 | def __call__(self, path, repo_id, config, with_wire=None): | |
170 | log.debug('%s RepoMaker call on %s', self.backend_type.upper(), path) |
|
170 | log.debug('%s RepoMaker call on %s', self.backend_type.upper(), path) | |
171 | return RemoteRepo(path, repo_id, config, self, with_wire=with_wire) |
|
171 | return RemoteRepo(path, repo_id, config, self, with_wire=with_wire) | |
172 |
|
172 | |||
173 | def __getattr__(self, name): |
|
173 | def __getattr__(self, name): | |
174 | def remote_attr(*args, **kwargs): |
|
174 | def remote_attr(*args, **kwargs): | |
175 | return self._call(name, *args, **kwargs) |
|
175 | return self._call(name, *args, **kwargs) | |
176 | return remote_attr |
|
176 | return remote_attr | |
177 |
|
177 | |||
178 | @exceptions.map_vcs_exceptions |
|
178 | @exceptions.map_vcs_exceptions | |
179 | def _call(self, func_name, *args, **kwargs): |
|
179 | def _call(self, func_name, *args, **kwargs): | |
180 | payload = { |
|
180 | payload = { | |
181 | 'id': str(uuid.uuid4()), |
|
181 | 'id': str(uuid.uuid4()), | |
182 | 'method': func_name, |
|
182 | 'method': func_name, | |
183 | 'backend': self.backend_type, |
|
183 | 'backend': self.backend_type, | |
184 | 'params': {'args': args, 'kwargs': kwargs} |
|
184 | 'params': {'args': args, 'kwargs': kwargs} | |
185 | } |
|
185 | } | |
186 | url = self.url |
|
186 | url = self.url | |
187 | return _remote_call(url, payload, EXCEPTIONS_MAP, self._session_factory()) |
|
187 | return _remote_call(url, payload, EXCEPTIONS_MAP, self._session_factory()) | |
188 |
|
188 | |||
189 |
|
189 | |||
190 | class RemoteRepo(object): |
|
190 | class RemoteRepo(object): | |
191 | CHUNK_SIZE = 16384 |
|
191 | CHUNK_SIZE = 16384 | |
192 |
|
192 | |||
193 | def __init__(self, path, repo_id, config, remote_maker, with_wire=None): |
|
193 | def __init__(self, path, repo_id, config, remote_maker, with_wire=None): | |
194 | self.url = remote_maker.url |
|
194 | self.url = remote_maker.url | |
195 | self.stream_url = remote_maker.stream_url |
|
195 | self.stream_url = remote_maker.stream_url | |
196 | self._session = remote_maker._session_factory() |
|
196 | self._session = remote_maker._session_factory() | |
197 |
|
197 | |||
198 | cache_repo_id = self._repo_id_sanitizer(repo_id) |
|
198 | cache_repo_id = self._repo_id_sanitizer(repo_id) | |
199 | _repo_name = self._get_repo_name(config, path) |
|
199 | _repo_name = self._get_repo_name(config, path) | |
200 | self._cache_region, self._cache_namespace = \ |
|
200 | self._cache_region, self._cache_namespace = \ | |
201 | remote_maker.init_cache_region(cache_repo_id) |
|
201 | remote_maker.init_cache_region(cache_repo_id) | |
202 |
|
202 | |||
203 | with_wire = with_wire or {} |
|
203 | with_wire = with_wire or {} | |
204 |
|
204 | |||
205 | repo_state_uid = with_wire.get('repo_state_uid') or 'state' |
|
205 | repo_state_uid = with_wire.get('repo_state_uid') or 'state' | |
206 |
|
206 | |||
207 | self._wire = { |
|
207 | self._wire = { | |
208 | "_repo_name": _repo_name, |
|
208 | "_repo_name": _repo_name, | |
209 | "path": path, # repo path |
|
209 | "path": path, # repo path | |
210 | "repo_id": repo_id, |
|
210 | "repo_id": repo_id, | |
211 | "cache_repo_id": cache_repo_id, |
|
211 | "cache_repo_id": cache_repo_id, | |
212 | "config": config, |
|
212 | "config": config, | |
213 | "repo_state_uid": repo_state_uid, |
|
213 | "repo_state_uid": repo_state_uid, | |
214 | "context": self._create_vcs_cache_context(path, repo_state_uid) |
|
214 | "context": self._create_vcs_cache_context(path, repo_state_uid) | |
215 | } |
|
215 | } | |
216 |
|
216 | |||
217 | if with_wire: |
|
217 | if with_wire: | |
218 | self._wire.update(with_wire) |
|
218 | self._wire.update(with_wire) | |
219 |
|
219 | |||
220 | # NOTE(johbo): Trading complexity for performance. Avoiding the call to |
|
220 | # NOTE(johbo): Trading complexity for performance. Avoiding the call to | |
221 | # log.debug brings a few percent gain even if it is not active.

221 | # log.debug brings a few percent gain even if it is not active. | 
222 | if log.isEnabledFor(logging.DEBUG): |
|
222 | if log.isEnabledFor(logging.DEBUG): | |
223 | self._call_with_logging = True |
|
223 | self._call_with_logging = True | |
224 |
|
224 | |||
225 | self.cert_dir = get_cert_path(rhodecode.CONFIG.get('__file__')) |
|
225 | self.cert_dir = get_cert_path(rhodecode.CONFIG.get('__file__')) | |
226 |
|
226 | |||
227 | def _get_repo_name(self, config, path): |
|
227 | def _get_repo_name(self, config, path): | |
228 | repo_store = config.get('paths', '/') |
|
228 | repo_store = config.get('paths', '/') | |
229 | return path.split(repo_store)[-1].lstrip('/') |
|
229 | return path.split(repo_store)[-1].lstrip('/') | |
230 |
|
230 | |||
231 | def _repo_id_sanitizer(self, repo_id): |
|
231 | def _repo_id_sanitizer(self, repo_id): | |
232 | pathless = repo_id.replace('/', '__').replace('-', '_') |
|
232 | pathless = repo_id.replace('/', '__').replace('-', '_') | |
233 | return ''.join(char if ord(char) < 128 else '_{}_'.format(ord(char)) for char in pathless) |
|
233 | return ''.join(char if ord(char) < 128 else '_{}_'.format(ord(char)) for char in pathless) | |
234 |
|
234 | |||
235 | def __getattr__(self, name): |
|
235 | def __getattr__(self, name): | |
236 |
|
236 | |||
237 | if name.startswith('stream:'): |
|
237 | if name.startswith('stream:'): | |
238 | def repo_remote_attr(*args, **kwargs): |
|
238 | def repo_remote_attr(*args, **kwargs): | |
239 | return self._call_stream(name, *args, **kwargs) |
|
239 | return self._call_stream(name, *args, **kwargs) | |
240 | else: |
|
240 | else: | |
241 | def repo_remote_attr(*args, **kwargs): |
|
241 | def repo_remote_attr(*args, **kwargs): | |
242 | return self._call(name, *args, **kwargs) |
|
242 | return self._call(name, *args, **kwargs) | |
243 |
|
243 | |||
244 | return repo_remote_attr |
|
244 | return repo_remote_attr | |
245 |
|
245 | |||
246 | def _base_call(self, name, *args, **kwargs): |
|
246 | def _base_call(self, name, *args, **kwargs): | |
247 | # TODO: oliver: This is currently necessary pre-call since the |
|
247 | # TODO: oliver: This is currently necessary pre-call since the | |
248 | # config object is being changed for hooking scenarios |
|
248 | # config object is being changed for hooking scenarios | |
249 | wire = copy.deepcopy(self._wire) |
|
249 | wire = copy.deepcopy(self._wire) | |
250 | wire["config"] = wire["config"].serialize() |
|
250 | wire["config"] = wire["config"].serialize() | |
251 | wire["config"].append(('vcs', 'ssl_dir', self.cert_dir)) |
|
251 | wire["config"].append(('vcs', 'ssl_dir', self.cert_dir)) | |
252 |
|
252 | |||
253 | payload = { |
|
253 | payload = { | |
254 | 'id': str(uuid.uuid4()), |
|
254 | 'id': str(uuid.uuid4()), | |
255 | 'method': name, |
|
255 | 'method': name, | |
256 | "_repo_name": wire['_repo_name'], |
|
256 | "_repo_name": wire['_repo_name'], | |
257 | 'params': {'wire': wire, 'args': args, 'kwargs': kwargs} |
|
257 | 'params': {'wire': wire, 'args': args, 'kwargs': kwargs} | |
258 | } |
|
258 | } | |
259 |
|
259 | |||
260 | context_uid = wire.get('context') |
|
260 | context_uid = wire.get('context') | |
261 | return context_uid, payload |
|
261 | return context_uid, payload | |
262 |
|
262 | |||
263 | def get_local_cache(self, name, args): |
|
263 | def get_local_cache(self, name, args): | |
264 | cache_on = False |
|
264 | cache_on = False | |
265 | cache_key = '' |
|
265 | cache_key = '' | |
266 | local_cache_on = str2bool(rhodecode.CONFIG.get('vcs.methods.cache')) |
|
266 | local_cache_on = str2bool(rhodecode.CONFIG.get('vcs.methods.cache')) | |
267 |
|
267 | |||
268 | cache_methods = [ |
|
268 | cache_methods = [ | |
269 | 'branches', 'tags', 'bookmarks', |
|
269 | 'branches', 'tags', 'bookmarks', | |
270 | 'is_large_file', 'is_binary', |
|
270 | 'is_large_file', 'is_binary', | |
271 | 'fctx_size', 'stream:fctx_node_data', 'blob_raw_length', |
|
271 | 'fctx_size', 'stream:fctx_node_data', 'blob_raw_length', | |
272 | 'node_history', |
|
272 | 'node_history', | |
273 | 'revision', 'tree_items', |
|
273 | 'revision', 'tree_items', | |
274 | 'ctx_list', 'ctx_branch', 'ctx_description', |
|
274 | 'ctx_list', 'ctx_branch', 'ctx_description', | |
275 | 'bulk_request', |
|
275 | 'bulk_request', | |
276 | 'assert_correct_path' |
|
276 | 'assert_correct_path' | |
277 | ] |
|
277 | ] | |
278 |
|
278 | |||
279 | if local_cache_on and name in cache_methods: |
|
279 | if local_cache_on and name in cache_methods: | |
280 | cache_on = True |
|
280 | cache_on = True | |
281 | repo_state_uid = self._wire['repo_state_uid'] |
|
281 | repo_state_uid = self._wire['repo_state_uid'] | |
282 | call_args = [a for a in args] |
|
282 | call_args = [a for a in args] | |
283 | cache_key = compute_key_from_params(repo_state_uid, name, *call_args) |
|
283 | cache_key = compute_key_from_params(repo_state_uid, name, *call_args) | |
284 |
|
284 | |||
285 | return cache_on, cache_key |
|
285 | return cache_on, cache_key | |
286 |
|
286 | |||
287 | @exceptions.map_vcs_exceptions |
|
287 | @exceptions.map_vcs_exceptions | |
288 | def _call(self, name, *args, **kwargs): |
|
288 | def _call(self, name, *args, **kwargs): | |
289 | context_uid, payload = self._base_call(name, *args, **kwargs) |
|
289 | context_uid, payload = self._base_call(name, *args, **kwargs) | |
290 | url = self.url |
|
290 | url = self.url | |
291 |
|
291 | |||
292 | start = time.time() |
|
292 | start = time.time() | |
293 | cache_on, cache_key = self.get_local_cache(name, args) |
|
293 | cache_on, cache_key = self.get_local_cache(name, args) | |
294 |
|
294 | |||
295 | @self._cache_region.conditional_cache_on_arguments( |
|
295 | @self._cache_region.conditional_cache_on_arguments( | |
296 | namespace=self._cache_namespace, condition=cache_on and cache_key) |
|
296 | namespace=self._cache_namespace, condition=cache_on and cache_key) | |
297 | def remote_call(_cache_key): |
|
297 | def remote_call(_cache_key): | |
298 | if self._call_with_logging: |
|
298 | if self._call_with_logging: | |
299 | log.debug('Calling %s@%s with args:%.10240r. wire_context: %s cache_on: %s', |
|
299 | log.debug('Calling %s@%s with args:%.10240r. wire_context: %s cache_on: %s', | |
300 | url, name, args, context_uid, cache_on) |
|
300 | url, name, args, context_uid, cache_on) | |
301 | return _remote_call(url, payload, EXCEPTIONS_MAP, self._session) |
|
301 | return _remote_call(url, payload, EXCEPTIONS_MAP, self._session) | |
302 |
|
302 | |||
303 | result = remote_call(cache_key) |
|
303 | result = remote_call(cache_key) | |
304 | if self._call_with_logging: |
|
304 | if self._call_with_logging: | |
305 | log.debug('Call %s@%s took: %.4fs. wire_context: %s', |
|
305 | log.debug('Call %s@%s took: %.4fs. wire_context: %s', | |
306 | url, name, time.time()-start, context_uid) |
|
306 | url, name, time.time()-start, context_uid) | |
307 | return result |
|
307 | return result | |
308 |
|
308 | |||
309 | @exceptions.map_vcs_exceptions |
|
309 | @exceptions.map_vcs_exceptions | |
310 | def _call_stream(self, name, *args, **kwargs): |
|
310 | def _call_stream(self, name, *args, **kwargs): | |
311 | context_uid, payload = self._base_call(name, *args, **kwargs) |
|
311 | context_uid, payload = self._base_call(name, *args, **kwargs) | |
312 | payload['chunk_size'] = self.CHUNK_SIZE |
|
312 | payload['chunk_size'] = self.CHUNK_SIZE | |
313 | url = self.stream_url |
|
313 | url = self.stream_url | |
314 |
|
314 | |||
315 | start = time.time() |
|
315 | start = time.time() | |
316 | cache_on, cache_key = self.get_local_cache(name, args) |
|
316 | cache_on, cache_key = self.get_local_cache(name, args) | |
317 |
|
317 | |||
318 | # Cache is a problem because this is a stream |
|
318 | # Cache is a problem because this is a stream | |
319 | def streaming_remote_call(_cache_key): |
|
319 | def streaming_remote_call(_cache_key): | |
320 | if self._call_with_logging: |
|
320 | if self._call_with_logging: | |
321 | log.debug('Calling %s@%s with args:%.10240r. wire_context: %s cache_on: %s', |
|
321 | log.debug('Calling %s@%s with args:%.10240r. wire_context: %s cache_on: %s', | |
322 | url, name, args, context_uid, cache_on) |
|
322 | url, name, args, context_uid, cache_on) | |
323 | return _streaming_remote_call(url, payload, EXCEPTIONS_MAP, self._session, self.CHUNK_SIZE) |
|
323 | return _streaming_remote_call(url, payload, EXCEPTIONS_MAP, self._session, self.CHUNK_SIZE) | |
324 |
|
324 | |||
325 | result = streaming_remote_call(cache_key) |
|
325 | result = streaming_remote_call(cache_key) | |
326 | if self._call_with_logging: |
|
326 | if self._call_with_logging: | |
327 | log.debug('Call %s@%s took: %.4fs. wire_context: %s', |
|
327 | log.debug('Call %s@%s took: %.4fs. wire_context: %s', | |
328 | url, name, time.time()-start, context_uid) |
|
328 | url, name, time.time()-start, context_uid) | |
329 | return result |
|
329 | return result | |
330 |
|
330 | |||
331 | def __getitem__(self, key): |
|
331 | def __getitem__(self, key): | |
332 | return self.revision(key) |
|
332 | return self.revision(key) | |
333 |
|
333 | |||
334 | def _create_vcs_cache_context(self, *args): |
|
334 | def _create_vcs_cache_context(self, *args): | |
335 | """ |
|
335 | """ | |
336 | Creates a unique string which is passed to the VCSServer on every |
|
336 | Creates a unique string which is passed to the VCSServer on every | |
337 | remote call. It is used as cache key in the VCSServer. |
|
337 | remote call. It is used as cache key in the VCSServer. | |
338 | """ |
|
338 | """ | |
339 | hash_key = '-'.join(map(str, args)) |
|
339 | hash_key = '-'.join(map(str, args)) | |
340 | return str(uuid.uuid5(uuid.NAMESPACE_URL, hash_key)) |
|
340 | return str(uuid.uuid5(uuid.NAMESPACE_URL, hash_key)) | |
341 |
|
341 | |||
342 | def invalidate_vcs_cache(self): |
|
342 | def invalidate_vcs_cache(self): | |
343 | """ |
|
343 | """ | |
344 | This invalidates the context which is sent to the VCSServer on every |
|
344 | This invalidates the context which is sent to the VCSServer on every | |
345 | call to a remote method. It forces the VCSServer to create a fresh |
|
345 | call to a remote method. It forces the VCSServer to create a fresh | |
346 | repository instance on the next call to a remote method. |
|
346 | repository instance on the next call to a remote method. | |
347 | """ |
|
347 | """ | |
348 | self._wire['context'] = str(uuid.uuid4()) |
|
348 | self._wire['context'] = str(uuid.uuid4()) | |
349 |
|
349 | |||
350 |
|
350 | |||
351 | class VcsHttpProxy(object): |
|
351 | class VcsHttpProxy(object): | |
352 |
|
352 | |||
353 | CHUNK_SIZE = 16384 |
|
353 | CHUNK_SIZE = 16384 | |
354 |
|
354 | |||
355 | def __init__(self, server_and_port, backend_endpoint): |
|
355 | def __init__(self, server_and_port, backend_endpoint): | |
356 | retries = Retry(total=5, connect=None, read=None, redirect=None) |
|
356 | retries = Retry(total=5, connect=None, read=None, redirect=None) | |
357 |
|
357 | |||
358 | adapter = requests.adapters.HTTPAdapter(max_retries=retries) |
|
358 | adapter = requests.adapters.HTTPAdapter(max_retries=retries) | |
359 | self.base_url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint) |
|
359 | self.base_url = urllib.parse.urljoin('http://%s' % server_and_port, backend_endpoint) | 
360 | self.session = requests.Session() |
|
360 | self.session = requests.Session() | |
361 | self.session.mount('http://', adapter) |
|
361 | self.session.mount('http://', adapter) | |
362 |
|
362 | |||
363 | def handle(self, environment, input_data, *args, **kwargs): |
|
363 | def handle(self, environment, input_data, *args, **kwargs): | |
364 | data = { |
|
364 | data = { | |
365 | 'environment': environment, |
|
365 | 'environment': environment, | |
366 | 'input_data': input_data, |
|
366 | 'input_data': input_data, | |
367 | 'args': args, |
|
367 | 'args': args, | |
368 | 'kwargs': kwargs |
|
368 | 'kwargs': kwargs | |
369 | } |
|
369 | } | |
370 | result = self.session.post( |
|
370 | result = self.session.post( | |
371 | self.base_url, msgpack.packb(data), stream=True) |
|
371 | self.base_url, msgpack.packb(data), stream=True) | |
372 | return self._get_result(result) |
|
372 | return self._get_result(result) | |
373 |
|
373 | |||
374 | def _deserialize_and_raise(self, error): |
|
374 | def _deserialize_and_raise(self, error): | |
375 | exception = Exception(error['message']) |
|
375 | exception = Exception(error['message']) | |
376 | try: |
|
376 | try: | |
377 | exception._vcs_kind = error['_vcs_kind'] |
|
377 | exception._vcs_kind = error['_vcs_kind'] | |
378 | except KeyError: |
|
378 | except KeyError: | |
379 | pass |
|
379 | pass | |
380 | raise exception |
|
380 | raise exception | |
381 |
|
381 | |||
382 | def _iterate(self, result): |
|
382 | def _iterate(self, result): | |
383 | unpacker = msgpack.Unpacker() |
|
383 | unpacker = msgpack.Unpacker() | |
384 | for line in result.iter_content(chunk_size=self.CHUNK_SIZE): |
|
384 | for line in result.iter_content(chunk_size=self.CHUNK_SIZE): | |
385 | unpacker.feed(line) |
|
385 | unpacker.feed(line) | |
386 | for chunk in unpacker: |
|
386 | for chunk in unpacker: | |
387 | yield chunk |
|
387 | yield chunk | |
388 |
|
388 | |||
389 | def _get_result(self, result): |
|
389 | def _get_result(self, result): | |
390 | iterator = self._iterate(result) |
|
390 | iterator = self._iterate(result) | |
391 | error = iterator.next() |
|
391 | error = next(iterator) | 
392 | if error: |
|
392 | if error: | |
393 | self._deserialize_and_raise(error) |
|
393 | self._deserialize_and_raise(error) | |
394 |
|
394 | |||
395 | status = iterator.next() |
|
395 | status = next(iterator) | 
396 | headers = iterator.next() |
|
396 | headers = next(iterator) | 
397 |
|
397 | |||
398 | return iterator, status, headers |
|
398 | return iterator, status, headers | |
399 |
|
399 | |||
400 |
|
400 | |||
401 | class ThreadlocalSessionFactory(object): |
|
401 | class ThreadlocalSessionFactory(object): | |
402 | """ |
|
402 | """ | |
403 | Creates one CurlSession per thread on demand. |
|
403 | Creates one CurlSession per thread on demand. | |
404 | """ |
|
404 | """ | |
405 |
|
405 | |||
406 | def __init__(self): |
|
406 | def __init__(self): | |
407 | self._thread_local = threading.local() |
|
407 | self._thread_local = threading.local() | |
408 |
|
408 | |||
409 | def __call__(self): |
|
409 | def __call__(self): | |
410 | if not hasattr(self._thread_local, 'curl_session'): |
|
410 | if not hasattr(self._thread_local, 'curl_session'): | |
411 | self._thread_local.curl_session = CurlSession() |
|
411 | self._thread_local.curl_session = CurlSession() | |
412 | return self._thread_local.curl_session |
|
412 | return self._thread_local.curl_session |
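# Illustrative sketch (not part of the changeset above): the hunk ports this
# client from the Python 2 ``urlparse`` module to Python 3, where the same
# helpers live directly under ``urllib.parse``. The host, port and endpoint
# values below are made-up examples, not values taken from the changeset.
import urllib.parse

server_and_port = 'localhost:9900'    # hypothetical VCSServer address
backend_endpoint = '/hg'              # hypothetical backend endpoint

# Python 2: urlparse.urljoin(...)   ->  Python 3: urllib.parse.urljoin(...)
url = urllib.parse.urljoin('http://%s' % server_and_port, backend_endpoint)
assert url == 'http://localhost:9900/hg'

# Python 2: urlparse.urlparse(...)  ->  Python 3: urllib.parse.urlparse(...)
assert urllib.parse.urlparse(url).netloc == 'localhost:9900'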
@@ -1,59 +1,59 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import logging |
|
21 | import logging | |
22 |
|
22 | |||
23 | import deform.widget |
|
23 | import deform.widget | |
24 | from deform.widget import null, OptGroup
|
24 | from deform.widget import null, OptGroup | |
25 |
|
25 | |||
26 | log = logging.getLogger(__name__) |
|
26 | log = logging.getLogger(__name__) | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | def _normalize_choices(values): |
|
29 | def _normalize_choices(values): | |
30 | result = [] |
|
30 | result = [] | |
31 | for item in values: |
|
31 | for item in values: | |
32 | if isinstance(item, OptGroup): |
|
32 | if isinstance(item, OptGroup): | |
33 | normalized_options = _normalize_choices(item.options) |
|
33 | normalized_options = _normalize_choices(item.options) | |
34 | result.append(OptGroup(item.label, *normalized_options)) |
|
34 | result.append(OptGroup(item.label, *normalized_options)) | |
35 | else: |
|
35 | else: | |
36 | value, description, help_block = item |
|
36 | value, description, help_block = item | |
37 | if not isinstance(value, str
|
37 | if not isinstance(value, str): | |
38 | value = str(value) |
|
38 | value = str(value) | |
39 | result.append((value, description, help_block)) |
|
39 | result.append((value, description, help_block)) | |
40 | return result |
|
40 | return result | |
41 |
|
41 | |||
42 |
|
42 | |||
43 | class CodeMirrorWidget(deform.widget.TextAreaWidget): |
|
43 | class CodeMirrorWidget(deform.widget.TextAreaWidget): | |
44 | template = 'codemirror' |
|
44 | template = 'codemirror' | |
45 | requirements = (('deform', None), ('codemirror', None)) |
|
45 | requirements = (('deform', None), ('codemirror', None)) | |
46 |
|
46 | |||
47 |
|
47 | |||
48 | class CheckboxChoiceWidgetDesc(deform.widget.CheckboxChoiceWidget): |
|
48 | class CheckboxChoiceWidgetDesc(deform.widget.CheckboxChoiceWidget): | |
49 | template = "checkbox_choice_desc" |
|
49 | template = "checkbox_choice_desc" | |
50 |
|
50 | |||
51 | def serialize(self, field, cstruct, **kw): |
|
51 | def serialize(self, field, cstruct, **kw): | |
52 | if cstruct in (null, None): |
|
52 | if cstruct in (null, None): | |
53 | cstruct = () |
|
53 | cstruct = () | |
54 | readonly = kw.get("readonly", self.readonly) |
|
54 | readonly = kw.get("readonly", self.readonly) | |
55 | values = kw.get("values", self.values) |
|
55 | values = kw.get("values", self.values) | |
56 | kw["values"] = _normalize_choices(values) |
|
56 | kw["values"] = _normalize_choices(values) | |
57 | template = readonly and self.readonly_template or self.template |
|
57 | template = readonly and self.readonly_template or self.template | |
58 | tmpl_values = self.get_template_values(field, cstruct, kw) |
|
58 | tmpl_values = self.get_template_values(field, cstruct, kw) | |
59 | return field.renderer(template, **tmpl_values) |
|
59 | return field.renderer(template, **tmpl_values) |
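# Illustrative sketch (not part of the changeset above): a hypothetical call to
# the ``_normalize_choices`` helper shown in this file, to make the Python 3
# ``str`` check concrete. The 3-tuples follow the (value, description,
# help_block) shape the widget unpacks; the values are made-up examples.
choices = [
    (1, 'Scheduled', 'run on a schedule'),   # non-str value gets coerced
    ('manual', 'Manual', 'run on demand'),   # str value passes through
]
assert _normalize_choices(choices) == [
    ('1', 'Scheduled', 'run on a schedule'),
    ('manual', 'Manual', 'run on demand'),
]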
@@ -1,140 +1,140 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import pytest |
|
21 | import pytest | |
22 | import urlparse |
|
22 | import urllib.parse | |
23 | import mock |
|
23 | import mock | |
24 | import simplejson as json |
|
24 | import simplejson as json | |
25 |
|
25 | |||
26 | from rhodecode.lib.vcs.backends.base import Config |
|
26 | from rhodecode.lib.vcs.backends.base import Config | |
27 | from rhodecode.tests.lib.middleware import mock_scm_app |
|
27 | from rhodecode.tests.lib.middleware import mock_scm_app | |
28 | import rhodecode.lib.middleware.simplegit as simplegit |
|
28 | import rhodecode.lib.middleware.simplegit as simplegit | |
29 |
|
29 | |||
30 |
|
30 | |||
31 | def get_environ(url, request_method): |
|
31 | def get_environ(url, request_method): | |
32 | """Construct a minimum WSGI environ based on the URL.""" |
|
32 | """Construct a minimum WSGI environ based on the URL.""" | |
33 | parsed_url = urlparse.urlparse(url) |
|
33 | parsed_url = urllib.parse.urlparse(url) | 
34 | environ = { |
|
34 | environ = { | |
35 | 'PATH_INFO': parsed_url.path, |
|
35 | 'PATH_INFO': parsed_url.path, | |
36 | 'QUERY_STRING': parsed_url.query, |
|
36 | 'QUERY_STRING': parsed_url.query, | |
37 | 'REQUEST_METHOD': request_method, |
|
37 | 'REQUEST_METHOD': request_method, | |
38 | } |
|
38 | } | |
39 |
|
39 | |||
40 | return environ |
|
40 | return environ | |
41 |
|
41 | |||
42 |
|
42 | |||
43 | @pytest.mark.parametrize( |
|
43 | @pytest.mark.parametrize( | |
44 | 'url, expected_action, request_method', |
|
44 | 'url, expected_action, request_method', | |
45 | [ |
|
45 | [ | |
46 | ('/foo/bar/info/refs?service=git-upload-pack', 'pull', 'GET'), |
|
46 | ('/foo/bar/info/refs?service=git-upload-pack', 'pull', 'GET'), | |
47 | ('/foo/bar/info/refs?service=git-receive-pack', 'push', 'GET'), |
|
47 | ('/foo/bar/info/refs?service=git-receive-pack', 'push', 'GET'), | |
48 | ('/foo/bar/git-upload-pack', 'pull', 'GET'), |
|
48 | ('/foo/bar/git-upload-pack', 'pull', 'GET'), | |
49 | ('/foo/bar/git-receive-pack', 'push', 'GET'), |
|
49 | ('/foo/bar/git-receive-pack', 'push', 'GET'), | |
50 | # Edge case: missing data for info/refs |
|
50 | # Edge case: missing data for info/refs | |
51 | ('/foo/info/refs?service=', 'pull', 'GET'), |
|
51 | ('/foo/info/refs?service=', 'pull', 'GET'), | |
52 | ('/foo/info/refs', 'pull', 'GET'), |
|
52 | ('/foo/info/refs', 'pull', 'GET'), | |
53 | # Edge case: git command comes with service argument |
|
53 | # Edge case: git command comes with service argument | |
54 | ('/foo/git-upload-pack?service=git-receive-pack', 'pull', 'GET'), |
|
54 | ('/foo/git-upload-pack?service=git-receive-pack', 'pull', 'GET'), | |
55 | ('/foo/git-receive-pack?service=git-upload-pack', 'push', 'GET'), |
|
55 | ('/foo/git-receive-pack?service=git-upload-pack', 'push', 'GET'), | |
56 | # Edge case: repo name conflicts with git commands |
|
56 | # Edge case: repo name conflicts with git commands | |
57 | ('/git-receive-pack/git-upload-pack', 'pull', 'GET'), |
|
57 | ('/git-receive-pack/git-upload-pack', 'pull', 'GET'), | |
58 | ('/git-receive-pack/git-receive-pack', 'push', 'GET'), |
|
58 | ('/git-receive-pack/git-receive-pack', 'push', 'GET'), | |
59 | ('/git-upload-pack/git-upload-pack', 'pull', 'GET'), |
|
59 | ('/git-upload-pack/git-upload-pack', 'pull', 'GET'), | |
60 | ('/git-upload-pack/git-receive-pack', 'push', 'GET'), |
|
60 | ('/git-upload-pack/git-receive-pack', 'push', 'GET'), | |
61 | ('/foo/git-receive-pack', 'push', 'GET'), |
|
61 | ('/foo/git-receive-pack', 'push', 'GET'), | |
62 | # Edge case: not a smart protocol url |
|
62 | # Edge case: not a smart protocol url | |
63 | ('/foo/bar', 'pull', 'GET'), |
|
63 | ('/foo/bar', 'pull', 'GET'), | |
64 | # GIT LFS cases, batch |
|
64 | # GIT LFS cases, batch | |
65 | ('/foo/bar/info/lfs/objects/batch', 'push', 'GET'), |
|
65 | ('/foo/bar/info/lfs/objects/batch', 'push', 'GET'), | |
66 | ('/foo/bar/info/lfs/objects/batch', 'pull', 'POST'), |
|
66 | ('/foo/bar/info/lfs/objects/batch', 'pull', 'POST'), | |
67 | # GIT LFS oid, dl/upl |
|
67 | # GIT LFS oid, dl/upl | |
68 | ('/foo/bar/info/lfs/abcdeabcde', 'pull', 'GET'), |
|
68 | ('/foo/bar/info/lfs/abcdeabcde', 'pull', 'GET'), | |
69 | ('/foo/bar/info/lfs/abcdeabcde', 'push', 'PUT'), |
|
69 | ('/foo/bar/info/lfs/abcdeabcde', 'push', 'PUT'), | |
70 | ('/foo/bar/info/lfs/abcdeabcde', 'push', 'POST'), |
|
70 | ('/foo/bar/info/lfs/abcdeabcde', 'push', 'POST'), | |
71 | # Edge case: repo name conflicts with git commands |
|
71 | # Edge case: repo name conflicts with git commands | |
72 | ('/info/lfs/info/lfs/objects/batch', 'push', 'GET'), |
|
72 | ('/info/lfs/info/lfs/objects/batch', 'push', 'GET'), | |
73 | ('/info/lfs/info/lfs/objects/batch', 'pull', 'POST'), |
|
73 | ('/info/lfs/info/lfs/objects/batch', 'pull', 'POST'), | |
74 |
|
74 | |||
75 | ]) |
|
75 | ]) | |
76 | def test_get_action(url, expected_action, request_method, baseapp, request_stub): |
|
76 | def test_get_action(url, expected_action, request_method, baseapp, request_stub): | |
77 | app = simplegit.SimpleGit(config={'auth_ret_code': '', 'base_path': ''}, |
|
77 | app = simplegit.SimpleGit(config={'auth_ret_code': '', 'base_path': ''}, | |
78 | registry=request_stub.registry) |
|
78 | registry=request_stub.registry) | |
79 | assert expected_action == app._get_action(get_environ(url, request_method)) |
|
79 | assert expected_action == app._get_action(get_environ(url, request_method)) | |
80 |
|
80 | |||
81 |
|
81 | |||
82 | @pytest.mark.parametrize( |
|
82 | @pytest.mark.parametrize( | |
83 | 'url, expected_repo_name, request_method', |
|
83 | 'url, expected_repo_name, request_method', | |
84 | [ |
|
84 | [ | |
85 | ('/foo/info/refs?service=git-upload-pack', 'foo', 'GET'), |
|
85 | ('/foo/info/refs?service=git-upload-pack', 'foo', 'GET'), | |
86 | ('/foo/bar/info/refs?service=git-receive-pack', 'foo/bar', 'GET'), |
|
86 | ('/foo/bar/info/refs?service=git-receive-pack', 'foo/bar', 'GET'), | |
87 | ('/foo/git-upload-pack', 'foo', 'GET'), |
|
87 | ('/foo/git-upload-pack', 'foo', 'GET'), | |
88 | ('/foo/git-receive-pack', 'foo', 'GET'), |
|
88 | ('/foo/git-receive-pack', 'foo', 'GET'), | |
89 | ('/foo/bar/git-upload-pack', 'foo/bar', 'GET'), |
|
89 | ('/foo/bar/git-upload-pack', 'foo/bar', 'GET'), | |
90 | ('/foo/bar/git-receive-pack', 'foo/bar', 'GET'), |
|
90 | ('/foo/bar/git-receive-pack', 'foo/bar', 'GET'), | |
91 |
|
91 | |||
92 | # GIT LFS cases, batch |
|
92 | # GIT LFS cases, batch | |
93 | ('/foo/bar/info/lfs/objects/batch', 'foo/bar', 'GET'), |
|
93 | ('/foo/bar/info/lfs/objects/batch', 'foo/bar', 'GET'), | |
94 | ('/example-git/info/lfs/objects/batch', 'example-git', 'POST'), |
|
94 | ('/example-git/info/lfs/objects/batch', 'example-git', 'POST'), | |
95 | # GIT LFS oid, dl/upl |
|
95 | # GIT LFS oid, dl/upl | |
96 | ('/foo/info/lfs/abcdeabcde', 'foo', 'GET'), |
|
96 | ('/foo/info/lfs/abcdeabcde', 'foo', 'GET'), | |
97 | ('/foo/bar/info/lfs/abcdeabcde', 'foo/bar', 'PUT'), |
|
97 | ('/foo/bar/info/lfs/abcdeabcde', 'foo/bar', 'PUT'), | |
98 | ('/my-git-repo/info/lfs/abcdeabcde', 'my-git-repo', 'POST'), |
|
98 | ('/my-git-repo/info/lfs/abcdeabcde', 'my-git-repo', 'POST'), | |
99 | # Edge case: repo name conflicts with git commands |
|
99 | # Edge case: repo name conflicts with git commands | |
100 | ('/info/lfs/info/lfs/objects/batch', 'info/lfs', 'GET'), |
|
100 | ('/info/lfs/info/lfs/objects/batch', 'info/lfs', 'GET'), | |
101 | ('/info/lfs/info/lfs/objects/batch', 'info/lfs', 'POST'), |
|
101 | ('/info/lfs/info/lfs/objects/batch', 'info/lfs', 'POST'), | |
102 |
|
102 | |||
103 | ]) |
|
103 | ]) | |
104 | def test_get_repository_name(url, expected_repo_name, request_method, baseapp, request_stub): |
|
104 | def test_get_repository_name(url, expected_repo_name, request_method, baseapp, request_stub): | |
105 | app = simplegit.SimpleGit(config={'auth_ret_code': '', 'base_path': ''}, |
|
105 | app = simplegit.SimpleGit(config={'auth_ret_code': '', 'base_path': ''}, | |
106 | registry=request_stub.registry) |
|
106 | registry=request_stub.registry) | |
107 | assert expected_repo_name == app._get_repository_name( |
|
107 | assert expected_repo_name == app._get_repository_name( | |
108 | get_environ(url, request_method)) |
|
108 | get_environ(url, request_method)) | |
109 |
|
109 | |||
110 |
|
110 | |||
111 | def test_get_config(user_util, baseapp, request_stub): |
|
111 | def test_get_config(user_util, baseapp, request_stub): | |
112 | repo = user_util.create_repo(repo_type='git') |
|
112 | repo = user_util.create_repo(repo_type='git') | |
113 | app = simplegit.SimpleGit(config={'auth_ret_code': '', 'base_path': ''}, |
|
113 | app = simplegit.SimpleGit(config={'auth_ret_code': '', 'base_path': ''}, | |
114 | registry=request_stub.registry) |
|
114 | registry=request_stub.registry) | |
115 | extras = {'foo': 'FOO', 'bar': 'BAR'} |
|
115 | extras = {'foo': 'FOO', 'bar': 'BAR'} | |
116 |
|
116 | |||
117 | # We copy the extras as the method below will change the contents. |
|
117 | # We copy the extras as the method below will change the contents. | |
118 | git_config = app._create_config(dict(extras), repo_name=repo.repo_name) |
|
118 | git_config = app._create_config(dict(extras), repo_name=repo.repo_name) | |
119 |
|
119 | |||
120 | expected_config = dict(extras) |
|
120 | expected_config = dict(extras) | |
121 | expected_config.update({ |
|
121 | expected_config.update({ | |
122 | 'git_update_server_info': False, |
|
122 | 'git_update_server_info': False, | |
123 | 'git_lfs_enabled': False, |
|
123 | 'git_lfs_enabled': False, | |
124 | 'git_lfs_store_path': git_config['git_lfs_store_path'], |
|
124 | 'git_lfs_store_path': git_config['git_lfs_store_path'], | |
125 | 'git_lfs_http_scheme': 'http' |
|
125 | 'git_lfs_http_scheme': 'http' | |
126 | }) |
|
126 | }) | |
127 |
|
127 | |||
128 | assert git_config == expected_config |
|
128 | assert git_config == expected_config | |
129 |
|
129 | |||
130 |
|
130 | |||
131 | def test_create_wsgi_app_uses_scm_app_from_simplevcs(baseapp, request_stub): |
|
131 | def test_create_wsgi_app_uses_scm_app_from_simplevcs(baseapp, request_stub): | |
132 | config = { |
|
132 | config = { | |
133 | 'auth_ret_code': '', |
|
133 | 'auth_ret_code': '', | |
134 | 'base_path': '', |
|
134 | 'base_path': '', | |
135 | 'vcs.scm_app_implementation': |
|
135 | 'vcs.scm_app_implementation': | |
136 | 'rhodecode.tests.lib.middleware.mock_scm_app', |
|
136 | 'rhodecode.tests.lib.middleware.mock_scm_app', | |
137 | } |
|
137 | } | |
138 | app = simplegit.SimpleGit(config=config, registry=request_stub.registry) |
|
138 | app = simplegit.SimpleGit(config=config, registry=request_stub.registry) | |
139 | wsgi_app = app._create_wsgi_app('/tmp/test', 'test_repo', {}) |
|
139 | wsgi_app = app._create_wsgi_app('/tmp/test', 'test_repo', {}) | |
140 | assert wsgi_app is mock_scm_app.mock_git_wsgi |
|
140 | assert wsgi_app is mock_scm_app.mock_git_wsgi |
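# Illustrative sketch (not part of the changeset above): exercising the
# ``get_environ`` helper defined in this test module, to show the WSGI keys it
# builds once ``urllib.parse.urlparse`` is used. The URL is one of the
# parametrized cases; the expected dict follows from the helper as written.
environ = get_environ('/foo/bar/info/refs?service=git-upload-pack', 'GET')
assert environ == {
    'PATH_INFO': '/foo/bar/info/refs',
    'QUERY_STRING': 'service=git-upload-pack',
    'REQUEST_METHOD': 'GET',
}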
@@ -1,156 +1,156 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import urlparse |
|
21 | import urllib.parse | |
22 |
|
22 | |||
23 | import mock |
|
23 | import mock | |
24 | import pytest |
|
24 | import pytest | |
25 | import simplejson as json |
|
25 | import simplejson as json | |
26 |
|
26 | |||
27 | from rhodecode.lib.vcs.backends.base import Config |
|
27 | from rhodecode.lib.vcs.backends.base import Config | |
28 | from rhodecode.tests.lib.middleware import mock_scm_app |
|
28 | from rhodecode.tests.lib.middleware import mock_scm_app | |
29 | import rhodecode.lib.middleware.simplehg as simplehg |
|
29 | import rhodecode.lib.middleware.simplehg as simplehg | |
30 |
|
30 | |||
31 |
|
31 | |||
32 | def get_environ(url): |
|
32 | def get_environ(url): | |
33 | """Construct a minimum WSGI environ based on the URL.""" |
|
33 | """Construct a minimum WSGI environ based on the URL.""" | |
34 | parsed_url = urlparse.urlparse(url) |
|
34 | parsed_url = urllib.parse.urlparse(url) | 
35 | environ = { |
|
35 | environ = { | |
36 | 'PATH_INFO': parsed_url.path, |
|
36 | 'PATH_INFO': parsed_url.path, | |
37 | 'QUERY_STRING': parsed_url.query, |
|
37 | 'QUERY_STRING': parsed_url.query, | |
38 | } |
|
38 | } | |
39 |
|
39 | |||
40 | return environ |
|
40 | return environ | |
41 |
|
41 | |||
42 |
|
42 | |||
43 | @pytest.mark.parametrize( |
|
43 | @pytest.mark.parametrize( | |
44 | 'url, expected_action', |
|
44 | 'url, expected_action', | |
45 | [ |
|
45 | [ | |
46 | ('/foo/bar?cmd=unbundle&key=tip', 'push'), |
|
46 | ('/foo/bar?cmd=unbundle&key=tip', 'push'), | |
47 | ('/foo/bar?cmd=pushkey&key=tip', 'push'), |
|
47 | ('/foo/bar?cmd=pushkey&key=tip', 'push'), | |
48 | ('/foo/bar?cmd=listkeys&key=tip', 'pull'), |
|
48 | ('/foo/bar?cmd=listkeys&key=tip', 'pull'), | |
49 | ('/foo/bar?cmd=changegroup&key=tip', 'pull'), |
|
49 | ('/foo/bar?cmd=changegroup&key=tip', 'pull'), | |
50 | ('/foo/bar?cmd=hello', 'pull'), |
|
50 | ('/foo/bar?cmd=hello', 'pull'), | |
51 | ('/foo/bar?cmd=batch', 'push'), |
|
51 | ('/foo/bar?cmd=batch', 'push'), | |
52 | ('/foo/bar?cmd=putlfile', 'push'), |
|
52 | ('/foo/bar?cmd=putlfile', 'push'), | |
53 | # Edge case: unknown argument: assume push |
|
53 | # Edge case: unknown argument: assume push | |
54 | ('/foo/bar?cmd=unknown&key=tip', 'push'), |
|
54 | ('/foo/bar?cmd=unknown&key=tip', 'push'), | |
55 | ('/foo/bar?cmd=&key=tip', 'push'), |
|
55 | ('/foo/bar?cmd=&key=tip', 'push'), | |
56 | # Edge case: not cmd argument |
|
56 | # Edge case: not cmd argument | |
57 | ('/foo/bar?key=tip', 'push'), |
|
57 | ('/foo/bar?key=tip', 'push'), | |
58 | ]) |
|
58 | ]) | |
59 | def test_get_action(url, expected_action, request_stub): |
|
59 | def test_get_action(url, expected_action, request_stub): | |
60 | app = simplehg.SimpleHg(config={'auth_ret_code': '', 'base_path': ''}, |
|
60 | app = simplehg.SimpleHg(config={'auth_ret_code': '', 'base_path': ''}, | |
61 | registry=request_stub.registry) |
|
61 | registry=request_stub.registry) | |
62 | assert expected_action == app._get_action(get_environ(url)) |
|
62 | assert expected_action == app._get_action(get_environ(url)) | |
63 |
|
63 | |||
64 |
|
64 | |||
65 | @pytest.mark.parametrize( |
|
65 | @pytest.mark.parametrize( | |
66 | 'environ, expected_xargs, expected_batch', |
|
66 | 'environ, expected_xargs, expected_batch', | |
67 | [ |
|
67 | [ | |
68 | ({}, |
|
68 | ({}, | |
69 | [''], ['push']), |
|
69 | [''], ['push']), | |
70 |
|
70 | |||
71 | ({'HTTP_X_HGARG_1': ''}, |
|
71 | ({'HTTP_X_HGARG_1': ''}, | |
72 | [''], ['push']), |
|
72 | [''], ['push']), | |
73 |
|
73 | |||
74 | ({'HTTP_X_HGARG_1': 'cmds=listkeys+namespace%3Dphases'}, |
|
74 | ({'HTTP_X_HGARG_1': 'cmds=listkeys+namespace%3Dphases'}, | |
75 | ['listkeys namespace=phases'], ['pull']), |
|
75 | ['listkeys namespace=phases'], ['pull']), | |
76 |
|
76 | |||
77 | ({'HTTP_X_HGARG_1': 'cmds=pushkey+namespace%3Dbookmarks%2Ckey%3Dbm%2Cold%3D%2Cnew%3Dcb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b'}, |
|
77 | ({'HTTP_X_HGARG_1': 'cmds=pushkey+namespace%3Dbookmarks%2Ckey%3Dbm%2Cold%3D%2Cnew%3Dcb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b'}, | |
78 | ['pushkey namespace=bookmarks,key=bm,old=,new=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b'], ['push']), |
|
78 | ['pushkey namespace=bookmarks,key=bm,old=,new=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b'], ['push']), | |
79 |
|
79 | |||
80 | ({'HTTP_X_HGARG_1': 'namespace=phases'}, |
|
80 | ({'HTTP_X_HGARG_1': 'namespace=phases'}, | |
81 | ['namespace=phases'], ['push']), |
|
81 | ['namespace=phases'], ['push']), | |
82 |
|
82 | |||
83 | ]) |
|
83 | ]) | |
84 | def test_xarg_and_batch_commands(environ, expected_xargs, expected_batch): |
|
84 | def test_xarg_and_batch_commands(environ, expected_xargs, expected_batch): | |
85 | app = simplehg.SimpleHg |
|
85 | app = simplehg.SimpleHg | |
86 |
|
86 | |||
87 | result = app._get_xarg_headers(environ) |
|
87 | result = app._get_xarg_headers(environ) | |
88 | result_batch = app._get_batch_cmd(environ) |
|
88 | result_batch = app._get_batch_cmd(environ) | |
89 | assert expected_xargs == result |
|
89 | assert expected_xargs == result | |
90 | assert expected_batch == result_batch |
|
90 | assert expected_batch == result_batch | |
91 |
|
91 | |||
92 |
|
92 | |||
93 | @pytest.mark.parametrize( |
|
93 | @pytest.mark.parametrize( | |
94 | 'url, expected_repo_name', |
|
94 | 'url, expected_repo_name', | |
95 | [ |
|
95 | [ | |
96 | ('/foo?cmd=unbundle&key=tip', 'foo'), |
|
96 | ('/foo?cmd=unbundle&key=tip', 'foo'), | |
97 | ('/foo/bar?cmd=pushkey&key=tip', 'foo/bar'), |
|
97 | ('/foo/bar?cmd=pushkey&key=tip', 'foo/bar'), | |
98 | ('/foo/bar/baz?cmd=listkeys&key=tip', 'foo/bar/baz'), |
|
98 | ('/foo/bar/baz?cmd=listkeys&key=tip', 'foo/bar/baz'), | |
99 | # Repos with trailing slashes. |
|
99 | # Repos with trailing slashes. | |
100 | ('/foo/?cmd=unbundle&key=tip', 'foo'), |
|
100 | ('/foo/?cmd=unbundle&key=tip', 'foo'), | |
101 | ('/foo/bar/?cmd=pushkey&key=tip', 'foo/bar'), |
|
101 | ('/foo/bar/?cmd=pushkey&key=tip', 'foo/bar'), | |
102 | ('/foo/bar/baz/?cmd=listkeys&key=tip', 'foo/bar/baz'), |
|
102 | ('/foo/bar/baz/?cmd=listkeys&key=tip', 'foo/bar/baz'), | |
103 | ]) |
|
103 | ]) | |
104 | def test_get_repository_name(url, expected_repo_name, request_stub): |
|
104 | def test_get_repository_name(url, expected_repo_name, request_stub): | |
105 | app = simplehg.SimpleHg(config={'auth_ret_code': '', 'base_path': ''}, |
|
105 | app = simplehg.SimpleHg(config={'auth_ret_code': '', 'base_path': ''}, | |
106 | registry=request_stub.registry) |
|
106 | registry=request_stub.registry) | |
107 | assert expected_repo_name == app._get_repository_name(get_environ(url)) |
|
107 | assert expected_repo_name == app._get_repository_name(get_environ(url)) | |
108 |
|
108 | |||
109 |
|
109 | |||
110 | def test_get_config(user_util, baseapp, request_stub): |
|
110 | def test_get_config(user_util, baseapp, request_stub): | |
111 | repo = user_util.create_repo(repo_type='git') |
|
111 | repo = user_util.create_repo(repo_type='git') | |
112 | app = simplehg.SimpleHg(config={'auth_ret_code': '', 'base_path': ''}, |
|
112 | app = simplehg.SimpleHg(config={'auth_ret_code': '', 'base_path': ''}, | |
113 | registry=request_stub.registry) |
|
113 | registry=request_stub.registry) | |
114 | extras = [('foo', 'FOO', 'bar', 'BAR')] |
|
114 | extras = [('foo', 'FOO', 'bar', 'BAR')] | |
115 |
|
115 | |||
116 | hg_config = app._create_config(extras, repo_name=repo.repo_name) |
|
116 | hg_config = app._create_config(extras, repo_name=repo.repo_name) | |
117 |
|
117 | |||
118 | config = simplehg.utils.make_db_config(repo=repo.repo_name) |
|
118 | config = simplehg.utils.make_db_config(repo=repo.repo_name) | |
119 | config.set('rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
119 | config.set('rhodecode', 'RC_SCM_DATA', json.dumps(extras)) | |
120 | hg_config_org = config |
|
120 | hg_config_org = config | |
121 |
|
121 | |||
122 | expected_config = [ |
|
122 | expected_config = [ | |
123 | ('vcs_svn_tag', 'ff89f8c714d135d865f44b90e5413b88de19a55f', '/tags/*'), |
|
123 | ('vcs_svn_tag', 'ff89f8c714d135d865f44b90e5413b88de19a55f', '/tags/*'), | |
124 | ('web', 'push_ssl', 'False'), |
|
124 | ('web', 'push_ssl', 'False'), | |
125 | ('web', 'allow_push', '*'), |
|
125 | ('web', 'allow_push', '*'), | |
126 | ('web', 'allow_archive', 'gz zip bz2'), |
|
126 | ('web', 'allow_archive', 'gz zip bz2'), | |
127 | ('web', 'baseurl', '/'), |
|
127 | ('web', 'baseurl', '/'), | |
128 | ('vcs_git_lfs', 'store_location', hg_config_org.get('vcs_git_lfs', 'store_location')), |
|
128 | ('vcs_git_lfs', 'store_location', hg_config_org.get('vcs_git_lfs', 'store_location')), | |
129 | ('vcs_svn_branch', '9aac1a38c3b8a0cdc4ae0f960a5f83332bc4fa5e', '/branches/*'), |
|
129 | ('vcs_svn_branch', '9aac1a38c3b8a0cdc4ae0f960a5f83332bc4fa5e', '/branches/*'), | |
130 | ('vcs_svn_branch', 'c7e6a611c87da06529fd0dd733308481d67c71a8', '/trunk'), |
|
130 | ('vcs_svn_branch', 'c7e6a611c87da06529fd0dd733308481d67c71a8', '/trunk'), | |
131 | ('largefiles', 'usercache', hg_config_org.get('largefiles', 'usercache')), |
|
131 | ('largefiles', 'usercache', hg_config_org.get('largefiles', 'usercache')), | |
132 | ('hooks', 'preoutgoing.pre_pull', 'python:vcsserver.hooks.pre_pull'), |
|
132 | ('hooks', 'preoutgoing.pre_pull', 'python:vcsserver.hooks.pre_pull'), | |
133 | ('hooks', 'prechangegroup.pre_push', 'python:vcsserver.hooks.pre_push'), |
|
133 | ('hooks', 'prechangegroup.pre_push', 'python:vcsserver.hooks.pre_push'), | |
134 | ('hooks', 'outgoing.pull_logger', 'python:vcsserver.hooks.log_pull_action'), |
|
134 | ('hooks', 'outgoing.pull_logger', 'python:vcsserver.hooks.log_pull_action'), | |
135 | ('hooks', 'pretxnchangegroup.pre_push', 'python:vcsserver.hooks.pre_push'), |
|
135 | ('hooks', 'pretxnchangegroup.pre_push', 'python:vcsserver.hooks.pre_push'), | |
136 | ('hooks', 'changegroup.push_logger', 'python:vcsserver.hooks.log_push_action'), |
|
136 | ('hooks', 'changegroup.push_logger', 'python:vcsserver.hooks.log_push_action'), | |
137 | ('hooks', 'changegroup.repo_size', 'python:vcsserver.hooks.repo_size'), |
|
137 | ('hooks', 'changegroup.repo_size', 'python:vcsserver.hooks.repo_size'), | |
138 | ('phases', 'publish', 'True'), |
|
138 | ('phases', 'publish', 'True'), | |
139 | ('extensions', 'largefiles', ''), |
|
139 | ('extensions', 'largefiles', ''), | |
140 | ('paths', '/', hg_config_org.get('paths', '/')), |
|
140 | ('paths', '/', hg_config_org.get('paths', '/')), | |
141 | ('rhodecode', 'RC_SCM_DATA', '[["foo", "FOO", "bar", "BAR"]]') |
|
141 | ('rhodecode', 'RC_SCM_DATA', '[["foo", "FOO", "bar", "BAR"]]') | |
142 | ] |
|
142 | ] | |
143 | for entry in expected_config: |
|
143 | for entry in expected_config: | |
144 | assert entry in hg_config |
|
144 | assert entry in hg_config | |
145 |
|
145 | |||
146 |
|
146 | |||
147 | def test_create_wsgi_app_uses_scm_app_from_simplevcs(request_stub): |
|
147 | def test_create_wsgi_app_uses_scm_app_from_simplevcs(request_stub): | |
148 | config = { |
|
148 | config = { | |
149 | 'auth_ret_code': '', |
|
149 | 'auth_ret_code': '', | |
150 | 'base_path': '', |
|
150 | 'base_path': '', | |
151 | 'vcs.scm_app_implementation': |
|
151 | 'vcs.scm_app_implementation': | |
152 | 'rhodecode.tests.lib.middleware.mock_scm_app', |
|
152 | 'rhodecode.tests.lib.middleware.mock_scm_app', | |
153 | } |
|
153 | } | |
154 | app = simplehg.SimpleHg(config=config, registry=request_stub.registry) |
|
154 | app = simplehg.SimpleHg(config=config, registry=request_stub.registry) | |
155 | wsgi_app = app._create_wsgi_app('/tmp/test', 'test_repo', {}) |
|
155 | wsgi_app = app._create_wsgi_app('/tmp/test', 'test_repo', {}) | |
156 | assert wsgi_app is mock_scm_app.mock_hg_wsgi |
|
156 | assert wsgi_app is mock_scm_app.mock_hg_wsgi |
@@ -1,468 +1,468 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import threading |
|
21 | import threading | |
22 | import time |
|
22 | import time | |
23 | import logging |
|
23 | import logging | |
24 | import os.path |
|
24 | import os.path | |
25 | import subprocess32 |
|
25 | import subprocess32 | |
26 | import tempfile |
|
26 | import tempfile | |
27 | import urllib.request, urllib.error, urllib.parse |
|
27 | import urllib.request, urllib.error, urllib.parse | |
28 | from lxml.html import fromstring, tostring |
|
28 | from lxml.html import fromstring, tostring | |
29 | from lxml.cssselect import CSSSelector |
|
29 | from lxml.cssselect import CSSSelector | |
30 | from urlparse import urlparse, parse_qsl |
|
30 | import urllib.parse | |
31 | from urllib.parse import unquote_plus |
|
31 | from urllib.parse import unquote_plus | |
32 | import webob |
|
32 | import webob | |
33 |
|
33 | |||
34 | from webtest.app import TestResponse, TestApp |
|
34 | from webtest.app import TestResponse, TestApp | |
35 | from webtest.compat import print_stderr |
|
35 | from webtest.compat import print_stderr | |
36 |
|
36 | |||
37 | import pytest |
|
37 | import pytest | |
38 | import rc_testdata |
|
38 | import rc_testdata | |
39 |
|
39 | |||
40 | from rhodecode.model.db import User, Repository |
|
40 | from rhodecode.model.db import User, Repository | |
41 | from rhodecode.model.meta import Session |
|
41 | from rhodecode.model.meta import Session | |
42 | from rhodecode.model.scm import ScmModel |
|
42 | from rhodecode.model.scm import ScmModel | |
43 | from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository |
|
43 | from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository | |
44 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
44 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |
45 | from rhodecode.tests import login_user_session |
|
45 | from rhodecode.tests import login_user_session | |
46 |
|
46 | |||
47 | log = logging.getLogger(__name__) |
|
47 | log = logging.getLogger(__name__) | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | class CustomTestResponse(TestResponse): |
|
50 | class CustomTestResponse(TestResponse): | |
51 |
|
51 | |||
52 | def _save_output(self, out): |
|
52 | def _save_output(self, out): | |
53 | f = tempfile.NamedTemporaryFile(delete=False, prefix='rc-test-', suffix='.html') |
|
53 | f = tempfile.NamedTemporaryFile(delete=False, prefix='rc-test-', suffix='.html') | |
54 | f.write(out) |
|
54 | f.write(out) | |
55 | return f.name |
|
55 | return f.name | |
56 |
|
56 | |||
57 | def mustcontain(self, *strings, **kw): |
|
57 | def mustcontain(self, *strings, **kw): | |
58 | """ |
|
58 | """ | |
59 | Assert that the response contains all of the strings passed |
|
59 | Assert that the response contains all of the strings passed | |
60 | in as arguments. |
|
60 | in as arguments. | |
61 |
|
61 | |||
62 | Equivalent to:: |
|
62 | Equivalent to:: | |
63 |
|
63 | |||
64 | assert string in res |
|
64 | assert string in res | |
65 | """ |
|
65 | """ | |
66 | print_body = kw.pop('print_body', False) |
|
66 | print_body = kw.pop('print_body', False) | |
67 | if 'no' in kw: |
|
67 | if 'no' in kw: | |
68 | no = kw['no'] |
|
68 | no = kw['no'] | |
69 | del kw['no'] |
|
69 | del kw['no'] | |
70 | if isinstance(no, str): |
|
70 | if isinstance(no, str): | |
71 | no = [no] |
|
71 | no = [no] | |
72 | else: |
|
72 | else: | |
73 | no = [] |
|
73 | no = [] | |
74 | if kw: |
|
74 | if kw: | |
75 | raise TypeError( |
|
75 | raise TypeError( | |
76 | "The only keyword argument allowed is 'no' got %s" % kw) |
|
76 | "The only keyword argument allowed is 'no' got %s" % kw) | |
77 |
|
77 | |||
78 | f = self._save_output(str(self)) |
|
78 | f = self._save_output(str(self)) | |
79 |
|
79 | |||
80 | for s in strings: |
|
80 | for s in strings: | |
81 | if not s in self: |
|
81 | if not s in self: | |
82 | print_stderr("Actual response (no %r):" % s) |
|
82 | print_stderr("Actual response (no %r):" % s) | |
83 | print_stderr("body output saved as `%s`" % f) |
|
83 | print_stderr("body output saved as `%s`" % f) | |
84 | if print_body: |
|
84 | if print_body: | |
85 | print_stderr(str(self)) |
|
85 | print_stderr(str(self)) | |
86 | raise IndexError( |
|
86 | raise IndexError( | |
87 | "Body does not contain string %r, body output saved as %s" % (s, f)) |
|
87 | "Body does not contain string %r, body output saved as %s" % (s, f)) | |
88 |
|
88 | |||
89 | for no_s in no: |
|
89 | for no_s in no: | |
90 | if no_s in self: |
|
90 | if no_s in self: | |
91 | print_stderr("Actual response (has %r)" % no_s) |
|
91 | print_stderr("Actual response (has %r)" % no_s) | |
92 | print_stderr("body output saved as `%s`" % f) |
|
92 | print_stderr("body output saved as `%s`" % f) | |
93 | if print_body: |
|
93 | if print_body: | |
94 | print_stderr(str(self)) |
|
94 | print_stderr(str(self)) | |
95 | raise IndexError( |
|
95 | raise IndexError( | |
96 | "Body contains bad string %r, body output saved as %s" % (no_s, f)) |
|
96 | "Body contains bad string %r, body output saved as %s" % (no_s, f)) | |
97 |
|
97 | |||
98 | def assert_response(self): |
|
98 | def assert_response(self): | |
99 | return AssertResponse(self) |
|
99 | return AssertResponse(self) | |
100 |
|
100 | |||
101 | def get_session_from_response(self): |
|
101 | def get_session_from_response(self): | |
102 | """ |
|
102 | """ | |
103 | This returns the session from a response object. |
|
103 | This returns the session from a response object. | |
104 | """ |
|
104 | """ | |
105 | from rhodecode.lib.rc_beaker import session_factory_from_settings |
|
105 | from rhodecode.lib.rc_beaker import session_factory_from_settings | |
106 | session = session_factory_from_settings(self.test_app._pyramid_settings) |
|
106 | session = session_factory_from_settings(self.test_app._pyramid_settings) | |
107 | return session(self.request) |
|
107 | return session(self.request) | |
108 |
|
108 | |||
109 |
|
109 | |||
110 | class TestRequest(webob.BaseRequest): |
|
110 | class TestRequest(webob.BaseRequest): | |
111 |
|
111 | |||
112 | # for py.test |
|
112 | # for py.test | |
113 | disabled = True |
|
113 | disabled = True | |
114 | ResponseClass = CustomTestResponse |
|
114 | ResponseClass = CustomTestResponse | |
115 |
|
115 | |||
116 | def add_response_callback(self, callback): |
|
116 | def add_response_callback(self, callback): | |
117 | pass |
|
117 | pass | |
118 |
|
118 | |||
119 |
|
119 | |||
120 | class CustomTestApp(TestApp): |
|
120 | class CustomTestApp(TestApp): | |
121 | """ |
|
121 | """ | |
122 | Custom app to make mustcontain more useful, and to expose a few helper methods |
|
122 | Custom app to make mustcontain more useful, and to expose a few helper methods | |
123 | """ |
|
123 | """ | |
124 | RequestClass = TestRequest |
|
124 | RequestClass = TestRequest | |
125 | rc_login_data = {} |
|
125 | rc_login_data = {} | |
126 | rc_current_session = None |
|
126 | rc_current_session = None | |
127 |
|
127 | |||
128 | def login(self, username=None, password=None): |
|
128 | def login(self, username=None, password=None): | |
129 | from rhodecode.lib import auth |
|
129 | from rhodecode.lib import auth | |
130 |
|
130 | |||
131 | if username and password: |
|
131 | if username and password: | |
132 | session = login_user_session(self, username, password) |
|
132 | session = login_user_session(self, username, password) | |
133 | else: |
|
133 | else: | |
134 | session = login_user_session(self) |
|
134 | session = login_user_session(self) | |
135 |
|
135 | |||
136 | self.rc_login_data['csrf_token'] = auth.get_csrf_token(session) |
|
136 | self.rc_login_data['csrf_token'] = auth.get_csrf_token(session) | |
137 | self.rc_current_session = session |
|
137 | self.rc_current_session = session | |
138 | return session['rhodecode_user'] |
|
138 | return session['rhodecode_user'] | |
139 |
|
139 | |||
140 | @property |
|
140 | @property | |
141 | def csrf_token(self): |
|
141 | def csrf_token(self): | |
142 | return self.rc_login_data['csrf_token'] |
|
142 | return self.rc_login_data['csrf_token'] | |
143 |
|
143 | |||
144 | @property |
|
144 | @property | |
145 | def _pyramid_registry(self): |
|
145 | def _pyramid_registry(self): | |
146 | return self.app.config.registry |
|
146 | return self.app.config.registry | |
147 |
|
147 | |||
148 | @property |
|
148 | @property | |
149 | def _pyramid_settings(self): |
|
149 | def _pyramid_settings(self): | |
150 | return self._pyramid_registry.settings |
|
150 | return self._pyramid_registry.settings | |
151 |
|
151 | |||
152 |
|
152 | |||
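For orientation when reviewing the class above: `CustomTestApp.login()` stores the CSRF token and the live session on the instance so later requests can reuse them, and responses come back as `CustomTestResponse` objects with the extended `mustcontain`. A minimal usage sketch follows; the application factory and credentials are placeholders, not taken from this change:

    # Sketch only: make_wsgi_app() stands in for however the suite builds
    # the Pyramid WSGI application; it is not part of this diff.
    test_app = CustomTestApp(make_wsgi_app())

    # login() keeps the CSRF token and session on the app instance.
    logged_in_user = test_app.login('some-user', 'some-password')

    # Later form posts can reuse the stored token, and mustcontain() can
    # assert both required and forbidden strings in the response body.
    response = test_app.post('/some/url', params={'csrf_token': test_app.csrf_token})
    response.mustcontain('expected text', no='forbidden text')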
153 | def set_anonymous_access(enabled): |
|
153 | def set_anonymous_access(enabled): | |
154 | """(Dis)allows anonymous access depending on parameter `enabled`""" |
|
154 | """(Dis)allows anonymous access depending on parameter `enabled`""" | |
155 | user = User.get_default_user() |
|
155 | user = User.get_default_user() | |
156 | user.active = enabled |
|
156 | user.active = enabled | |
157 | Session().add(user) |
|
157 | Session().add(user) | |
158 | Session().commit() |
|
158 | Session().commit() | |
159 | time.sleep(1.5) # must sleep for cache (1s to expire) |
|
159 | time.sleep(1.5) # must sleep for cache (1s to expire) | |
160 | log.info('anonymous access is now: %s', enabled) |
|
160 | log.info('anonymous access is now: %s', enabled) | |
161 | assert enabled == User.get_default_user().active, ( |
|
161 | assert enabled == User.get_default_user().active, ( | |
162 | 'Cannot set anonymous access') |
|
162 | 'Cannot set anonymous access') | |
163 |
|
163 | |||
164 |
|
164 | |||
165 | def check_xfail_backends(node, backend_alias): |
|
165 | def check_xfail_backends(node, backend_alias): | |
166 | # Using "xfail_backends" here intentionally, since this marks work |
|
166 | # Using "xfail_backends" here intentionally, since this marks work | |
167 | # which is "to be done" soon. |
|
167 | # which is "to be done" soon. | |
168 | skip_marker = node.get_closest_marker('xfail_backends') |
|
168 | skip_marker = node.get_closest_marker('xfail_backends') | |
169 | if skip_marker and backend_alias in skip_marker.args: |
|
169 | if skip_marker and backend_alias in skip_marker.args: | |
170 | msg = "Support for backend %s to be developed." % (backend_alias, ) |
|
170 | msg = "Support for backend %s to be developed." % (backend_alias, ) | |
171 | msg = skip_marker.kwargs.get('reason', msg) |
|
171 | msg = skip_marker.kwargs.get('reason', msg) | |
172 | pytest.xfail(msg) |
|
172 | pytest.xfail(msg) | |
173 |
|
173 | |||
174 |
|
174 | |||
175 | def check_skip_backends(node, backend_alias): |
|
175 | def check_skip_backends(node, backend_alias): | |
176 | # Using "skip_backends" here intentionally, since this marks work which is |
|
176 | # Using "skip_backends" here intentionally, since this marks work which is | |
177 | # not supported. |
|
177 | # not supported. | |
178 | skip_marker = node.get_closest_marker('skip_backends') |
|
178 | skip_marker = node.get_closest_marker('skip_backends') | |
179 | if skip_marker and backend_alias in skip_marker.args: |
|
179 | if skip_marker and backend_alias in skip_marker.args: | |
180 | msg = "Feature not supported for backend %s." % (backend_alias, ) |
|
180 | msg = "Feature not supported for backend %s." % (backend_alias, ) | |
181 | msg = skip_marker.kwargs.get('reason', msg) |
|
181 | msg = skip_marker.kwargs.get('reason', msg) | |
182 | pytest.skip(msg) |
|
182 | pytest.skip(msg) | |
183 |
|
183 | |||
184 |
|
184 | |||
185 | def extract_git_repo_from_dump(dump_name, repo_name): |
|
185 | def extract_git_repo_from_dump(dump_name, repo_name): | |
186 | """Create git repo `repo_name` from dump `dump_name`.""" |
|
186 | """Create git repo `repo_name` from dump `dump_name`.""" | |
187 | repos_path = ScmModel().repos_path |
|
187 | repos_path = ScmModel().repos_path | |
188 | target_path = os.path.join(repos_path, repo_name) |
|
188 | target_path = os.path.join(repos_path, repo_name) | |
189 | rc_testdata.extract_git_dump(dump_name, target_path) |
|
189 | rc_testdata.extract_git_dump(dump_name, target_path) | |
190 | return target_path |
|
190 | return target_path | |
191 |
|
191 | |||
192 |
|
192 | |||
193 | def extract_hg_repo_from_dump(dump_name, repo_name): |
|
193 | def extract_hg_repo_from_dump(dump_name, repo_name): | |
194 | """Create hg repo `repo_name` from dump `dump_name`.""" |
|
194 | """Create hg repo `repo_name` from dump `dump_name`.""" | |
195 | repos_path = ScmModel().repos_path |
|
195 | repos_path = ScmModel().repos_path | |
196 | target_path = os.path.join(repos_path, repo_name) |
|
196 | target_path = os.path.join(repos_path, repo_name) | |
197 | rc_testdata.extract_hg_dump(dump_name, target_path) |
|
197 | rc_testdata.extract_hg_dump(dump_name, target_path) | |
198 | return target_path |
|
198 | return target_path | |
199 |
|
199 | |||
200 |
|
200 | |||
201 | def extract_svn_repo_from_dump(dump_name, repo_name): |
|
201 | def extract_svn_repo_from_dump(dump_name, repo_name): | |
202 | """Create a svn repo `repo_name` from dump `dump_name`.""" |
|
202 | """Create a svn repo `repo_name` from dump `dump_name`.""" | |
203 | repos_path = ScmModel().repos_path |
|
203 | repos_path = ScmModel().repos_path | |
204 | target_path = os.path.join(repos_path, repo_name) |
|
204 | target_path = os.path.join(repos_path, repo_name) | |
205 | SubversionRepository(target_path, create=True) |
|
205 | SubversionRepository(target_path, create=True) | |
206 | _load_svn_dump_into_repo(dump_name, target_path) |
|
206 | _load_svn_dump_into_repo(dump_name, target_path) | |
207 | return target_path |
|
207 | return target_path | |
208 |
|
208 | |||
209 |
|
209 | |||
210 | def assert_message_in_log(log_records, message, levelno, module): |
|
210 | def assert_message_in_log(log_records, message, levelno, module): | |
211 | messages = [ |
|
211 | messages = [ | |
212 | r.message for r in log_records |
|
212 | r.message for r in log_records | |
213 | if r.module == module and r.levelno == levelno |
|
213 | if r.module == module and r.levelno == levelno | |
214 | ] |
|
214 | ] | |
215 | assert message in messages |
|
215 | assert message in messages | |
216 |
|
216 | |||
217 |
|
217 | |||
218 | def _load_svn_dump_into_repo(dump_name, repo_path): |
|
218 | def _load_svn_dump_into_repo(dump_name, repo_path): | |
219 | """ |
|
219 | """ | |
220 | Utility to populate a svn repository with a named dump |
|
220 | Utility to populate a svn repository with a named dump | |
221 |
|
221 | |||
222 | Currently the dumps are in rc_testdata. They might later on be |
|
222 | Currently the dumps are in rc_testdata. They might later on be | |
223 | integrated with the main repository once they stabilize more. |
|
223 | integrated with the main repository once they stabilize more. | |
224 | """ |
|
224 | """ | |
225 | dump = rc_testdata.load_svn_dump(dump_name) |
|
225 | dump = rc_testdata.load_svn_dump(dump_name) | |
226 | load_dump = subprocess32.Popen( |
|
226 | load_dump = subprocess32.Popen( | |
227 | ['svnadmin', 'load', repo_path], |
|
227 | ['svnadmin', 'load', repo_path], | |
228 | stdin=subprocess32.PIPE, stdout=subprocess32.PIPE, |
|
228 | stdin=subprocess32.PIPE, stdout=subprocess32.PIPE, | |
229 | stderr=subprocess32.PIPE) |
|
229 | stderr=subprocess32.PIPE) | |
230 | out, err = load_dump.communicate(dump) |
|
230 | out, err = load_dump.communicate(dump) | |
231 | if load_dump.returncode != 0: |
|
231 | if load_dump.returncode != 0: | |
232 | log.error("Output of load_dump command: %s", out) |
|
232 | log.error("Output of load_dump command: %s", out) | |
233 | log.error("Error output of load_dump command: %s", err) |
|
233 | log.error("Error output of load_dump command: %s", err) | |
234 | raise Exception( |
|
234 | raise Exception( | |
235 | 'Failed to load dump "%s" into repository at path "%s".' |
|
235 | 'Failed to load dump "%s" into repository at path "%s".' | |
236 | % (dump_name, repo_path)) |
|
236 | % (dump_name, repo_path)) | |
237 |
|
237 | |||
238 |
|
238 | |||
239 | class AssertResponse(object): |
|
239 | class AssertResponse(object): | |
240 | """ |
|
240 | """ | |
241 | Utility that helps to assert things about a given HTML response. |
|
241 | Utility that helps to assert things about a given HTML response. | |
242 | """ |
|
242 | """ | |
243 |
|
243 | |||
244 | def __init__(self, response): |
|
244 | def __init__(self, response): | |
245 | self.response = response |
|
245 | self.response = response | |
246 |
|
246 | |||
247 | def get_imports(self): |
|
247 | def get_imports(self): | |
248 | return fromstring, tostring, CSSSelector |
|
248 | return fromstring, tostring, CSSSelector | |
249 |
|
249 | |||
250 | def one_element_exists(self, css_selector): |
|
250 | def one_element_exists(self, css_selector): | |
251 | self.get_element(css_selector) |
|
251 | self.get_element(css_selector) | |
252 |
|
252 | |||
253 | def no_element_exists(self, css_selector): |
|
253 | def no_element_exists(self, css_selector): | |
254 | assert not self._get_elements(css_selector) |
|
254 | assert not self._get_elements(css_selector) | |
255 |
|
255 | |||
256 | def element_equals_to(self, css_selector, expected_content): |
|
256 | def element_equals_to(self, css_selector, expected_content): | |
257 | element = self.get_element(css_selector) |
|
257 | element = self.get_element(css_selector) | |
258 | element_text = self._element_to_string(element) |
|
258 | element_text = self._element_to_string(element) | |
259 | assert expected_content in element_text |
|
259 | assert expected_content in element_text | |
260 |
|
260 | |||
261 | def element_contains(self, css_selector, expected_content): |
|
261 | def element_contains(self, css_selector, expected_content): | |
262 | element = self.get_element(css_selector) |
|
262 | element = self.get_element(css_selector) | |
263 | assert expected_content in element.text_content() |
|
263 | assert expected_content in element.text_content() | |
264 |
|
264 | |||
265 | def element_value_contains(self, css_selector, expected_content): |
|
265 | def element_value_contains(self, css_selector, expected_content): | |
266 | element = self.get_element(css_selector) |
|
266 | element = self.get_element(css_selector) | |
267 | assert expected_content in element.value |
|
267 | assert expected_content in element.value | |
268 |
|
268 | |||
269 | def contains_one_link(self, link_text, href): |
|
269 | def contains_one_link(self, link_text, href): | |
270 | fromstring, tostring, CSSSelector = self.get_imports() |
|
270 | fromstring, tostring, CSSSelector = self.get_imports() | |
271 | doc = fromstring(self.response.body) |
|
271 | doc = fromstring(self.response.body) | |
272 | sel = CSSSelector('a[href]') |
|
272 | sel = CSSSelector('a[href]') | |
273 | elements = [ |
|
273 | elements = [ | |
274 | e for e in sel(doc) if e.text_content().strip() == link_text] |
|
274 | e for e in sel(doc) if e.text_content().strip() == link_text] | |
275 | assert len(elements) == 1, "Did not find link or found multiple links" |
|
275 | assert len(elements) == 1, "Did not find link or found multiple links" | |
276 | self._ensure_url_equal(elements[0].attrib.get('href'), href) |
|
276 | self._ensure_url_equal(elements[0].attrib.get('href'), href) | |
277 |
|
277 | |||
278 | def contains_one_anchor(self, anchor_id): |
|
278 | def contains_one_anchor(self, anchor_id): | |
279 | fromstring, tostring, CSSSelector = self.get_imports() |
|
279 | fromstring, tostring, CSSSelector = self.get_imports() | |
280 | doc = fromstring(self.response.body) |
|
280 | doc = fromstring(self.response.body) | |
281 | sel = CSSSelector('#' + anchor_id) |
|
281 | sel = CSSSelector('#' + anchor_id) | |
282 | elements = sel(doc) |
|
282 | elements = sel(doc) | |
283 | assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id) |
|
283 | assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id) | |
284 |
|
284 | |||
285 | def _ensure_url_equal(self, found, expected): |
|
285 | def _ensure_url_equal(self, found, expected): | |
286 | assert _Url(found) == _Url(expected) |
|
286 | assert _Url(found) == _Url(expected) | |
287 |
|
287 | |||
288 | def get_element(self, css_selector): |
|
288 | def get_element(self, css_selector): | |
289 | elements = self._get_elements(css_selector) |
|
289 | elements = self._get_elements(css_selector) | |
290 | assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector) |
|
290 | assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector) | |
291 | return elements[0] |
|
291 | return elements[0] | |
292 |
|
292 | |||
293 | def get_elements(self, css_selector): |
|
293 | def get_elements(self, css_selector): | |
294 | return self._get_elements(css_selector) |
|
294 | return self._get_elements(css_selector) | |
295 |
|
295 | |||
296 | def _get_elements(self, css_selector): |
|
296 | def _get_elements(self, css_selector): | |
297 | fromstring, tostring, CSSSelector = self.get_imports() |
|
297 | fromstring, tostring, CSSSelector = self.get_imports() | |
298 | doc = fromstring(self.response.body) |
|
298 | doc = fromstring(self.response.body) | |
299 | sel = CSSSelector(css_selector) |
|
299 | sel = CSSSelector(css_selector) | |
300 | elements = sel(doc) |
|
300 | elements = sel(doc) | |
301 | return elements |
|
301 | return elements | |
302 |
|
302 | |||
303 | def _element_to_string(self, element): |
|
303 | def _element_to_string(self, element): | |
304 | fromstring, tostring, CSSSelector = self.get_imports() |
|
304 | fromstring, tostring, CSSSelector = self.get_imports() | |
305 | return tostring(element) |
|
305 | return tostring(element) | |
306 |
|
306 | |||
307 |
|
307 | |||
308 | class _Url(object): |
|
308 | class _Url(object): | |
309 | """ |
|
309 | """ | |
310 | A url object that can be compared with other url objects |
|
310 | A url object that can be compared with other url objects | |
311 | without regard to the vagaries of encoding, escaping, and ordering |
|
311 | without regard to the vagaries of encoding, escaping, and ordering | |
312 | of parameters in query strings. |
|
312 | of parameters in query strings. | |
313 |
|
313 | |||
314 | Inspired by |
|
314 | Inspired by | |
315 | http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python |
|
315 | http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python | |
316 | """ |
|
316 | """ | |
317 |
|
317 | |||
318 | def __init__(self, url): |
|
318 | def __init__(self, url): | |
319 | parts = urlparse(url) |
|
319 | parts = urllib.parse.urlparse(url) | |
320 | _query = frozenset(parse_qsl(parts.query)) |
|
320 | _query = frozenset(urllib.parse.parse_qsl(parts.query)) | |
321 | _path = unquote_plus(parts.path) |
|
321 | _path = unquote_plus(parts.path) | |
322 | parts = parts._replace(query=_query, path=_path) |
|
322 | parts = parts._replace(query=_query, path=_path) | |
323 | self.parts = parts |
|
323 | self.parts = parts | |
324 |
|
324 | |||
325 | def __eq__(self, other): |
|
325 | def __eq__(self, other): | |
326 | return self.parts == other.parts |
|
326 | return self.parts == other.parts | |
327 |
|
327 | |||
328 | def __hash__(self): |
|
328 | def __hash__(self): | |
329 | return hash(self.parts) |
|
329 | return hash(self.parts) | |
330 |
|
330 | |||
331 |
|
331 | |||
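Side note on `_Url`: equality ignores query-parameter order and plus-escaping because the query string is reduced to a frozenset of `parse_qsl` pairs and the path is passed through `unquote_plus`. A standalone sketch of the same idea using `urllib.parse` directly (illustrative only, not part of the reviewed change):

    from urllib.parse import urlparse, parse_qsl, unquote_plus

    def url_key(url):
        # Reduce a URL to a comparable key: normalized path plus an
        # order-insensitive view of the query parameters.
        parts = urlparse(url)
        return (parts.scheme, parts.netloc, unquote_plus(parts.path),
                frozenset(parse_qsl(parts.query)))

    assert url_key('/repo/files?a=1&b=2') == url_key('/repo/files?b=2&a=1')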
332 | def run_test_concurrently(times, raise_catched_exc=True): |
|
332 | def run_test_concurrently(times, raise_catched_exc=True): | |
333 | """ |
|
333 | """ | |
334 | Add this decorator to small pieces of code that you want to test |
|
334 | Add this decorator to small pieces of code that you want to test | |
335 | concurrently |
|
335 | concurrently | |
336 |
|
336 | |||
337 | ex: |
|
337 | ex: | |
338 |
|
338 | |||
339 | @run_test_concurrently(25) |
|
339 | @run_test_concurrently(25) | |
340 | def my_test_function(): |
|
340 | def my_test_function(): | |
341 | ... |
|
341 | ... | |
342 | """ |
|
342 | """ | |
343 | def test_concurrently_decorator(test_func): |
|
343 | def test_concurrently_decorator(test_func): | |
344 | def wrapper(*args, **kwargs): |
|
344 | def wrapper(*args, **kwargs): | |
345 | exceptions = [] |
|
345 | exceptions = [] | |
346 |
|
346 | |||
347 | def call_test_func(): |
|
347 | def call_test_func(): | |
348 | try: |
|
348 | try: | |
349 | test_func(*args, **kwargs) |
|
349 | test_func(*args, **kwargs) | |
350 | except Exception as e: |
|
350 | except Exception as e: | |
351 | exceptions.append(e) |
|
351 | exceptions.append(e) | |
352 | if raise_catched_exc: |
|
352 | if raise_catched_exc: | |
353 | raise |
|
353 | raise | |
354 | threads = [] |
|
354 | threads = [] | |
355 | for i in range(times): |
|
355 | for i in range(times): | |
356 | threads.append(threading.Thread(target=call_test_func)) |
|
356 | threads.append(threading.Thread(target=call_test_func)) | |
357 | for t in threads: |
|
357 | for t in threads: | |
358 | t.start() |
|
358 | t.start() | |
359 | for t in threads: |
|
359 | for t in threads: | |
360 | t.join() |
|
360 | t.join() | |
361 | if exceptions: |
|
361 | if exceptions: | |
362 | raise Exception( |
|
362 | raise Exception( | |
363 | 'test_concurrently intercepted %s exceptions: %s' % ( |
|
363 | 'test_concurrently intercepted %s exceptions: %s' % ( | |
364 | len(exceptions), exceptions)) |
|
364 | len(exceptions), exceptions)) | |
365 | return wrapper |
|
365 | return wrapper | |
366 | return test_concurrently_decorator |
|
366 | return test_concurrently_decorator | |
367 |
|
367 | |||
368 |
|
368 | |||
369 | def wait_for_url(url, timeout=10): |
|
369 | def wait_for_url(url, timeout=10): | |
370 | """ |
|
370 | """ | |
371 | Wait until URL becomes reachable. |
|
371 | Wait until URL becomes reachable. | |
372 |
|
372 | |||
373 | It polls the URL until the timeout is reached or it becomes reachable. |
|
373 | It polls the URL until the timeout is reached or it becomes reachable. | |
374 | It will call `pytest.fail` if the URL is not reachable within the timeout. |
|
374 | It will call `pytest.fail` if the URL is not reachable within the timeout. | |
375 | """ |
|
375 | """ | |
376 | timeout = time.time() + timeout |
|
376 | timeout = time.time() + timeout | |
377 | last = 0 |
|
377 | last = 0 | |
378 | wait = 0.1 |
|
378 | wait = 0.1 | |
379 |
|
379 | |||
380 | while timeout > last: |
|
380 | while timeout > last: | |
381 | last = time.time() |
|
381 | last = time.time() | |
382 | if is_url_reachable(url): |
|
382 | if is_url_reachable(url): | |
383 | break |
|
383 | break | |
384 | elif (last + wait) > time.time(): |
|
384 | elif (last + wait) > time.time(): | |
385 | # Go to sleep because not enough time has passed since last check. |
|
385 | # Go to sleep because not enough time has passed since last check. | |
386 | time.sleep(wait) |
|
386 | time.sleep(wait) | |
387 | else: |
|
387 | else: | |
388 | pytest.fail("Timeout while waiting for URL {}".format(url)) |
|
388 | pytest.fail("Timeout while waiting for URL {}".format(url)) | |
389 |
|
389 | |||
390 |
|
390 | |||
391 | def is_url_reachable(url): |
|
391 | def is_url_reachable(url): | |
392 | try: |
|
392 | try: | |
393 | urllib.request.urlopen(url) |
|
393 | urllib.request.urlopen(url) | |
394 | except urllib.error.URLError: |
|
394 | except urllib.error.URLError: | |
395 | log.exception('URL `{}` reach error'.format(url)) |
|
395 | log.exception('URL `{}` reach error'.format(url)) | |
396 | return False |
|
396 | return False | |
397 | return True |
|
397 | return True | |
398 |
|
398 | |||
399 |
|
399 | |||
400 | def repo_on_filesystem(repo_name): |
|
400 | def repo_on_filesystem(repo_name): | |
401 | from rhodecode.lib import vcs |
|
401 | from rhodecode.lib import vcs | |
402 | from rhodecode.tests import TESTS_TMP_PATH |
|
402 | from rhodecode.tests import TESTS_TMP_PATH | |
403 | repo = vcs.get_vcs_instance( |
|
403 | repo = vcs.get_vcs_instance( | |
404 | os.path.join(TESTS_TMP_PATH, repo_name), create=False) |
|
404 | os.path.join(TESTS_TMP_PATH, repo_name), create=False) | |
405 | return repo is not None |
|
405 | return repo is not None | |
406 |
|
406 | |||
407 |
|
407 | |||
408 | def commit_change( |
|
408 | def commit_change( | |
409 | repo, filename, content, message, vcs_type, parent=None, newfile=False): |
|
409 | repo, filename, content, message, vcs_type, parent=None, newfile=False): | |
410 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN |
|
410 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN | |
411 |
|
411 | |||
412 | repo = Repository.get_by_repo_name(repo) |
|
412 | repo = Repository.get_by_repo_name(repo) | |
413 | _commit = parent |
|
413 | _commit = parent | |
414 | if not parent: |
|
414 | if not parent: | |
415 | _commit = EmptyCommit(alias=vcs_type) |
|
415 | _commit = EmptyCommit(alias=vcs_type) | |
416 |
|
416 | |||
417 | if newfile: |
|
417 | if newfile: | |
418 | nodes = { |
|
418 | nodes = { | |
419 | filename: { |
|
419 | filename: { | |
420 | 'content': content |
|
420 | 'content': content | |
421 | } |
|
421 | } | |
422 | } |
|
422 | } | |
423 | commit = ScmModel().create_nodes( |
|
423 | commit = ScmModel().create_nodes( | |
424 | user=TEST_USER_ADMIN_LOGIN, repo=repo, |
|
424 | user=TEST_USER_ADMIN_LOGIN, repo=repo, | |
425 | message=message, |
|
425 | message=message, | |
426 | nodes=nodes, |
|
426 | nodes=nodes, | |
427 | parent_commit=_commit, |
|
427 | parent_commit=_commit, | |
428 | author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN), |
|
428 | author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN), | |
429 | ) |
|
429 | ) | |
430 | else: |
|
430 | else: | |
431 | commit = ScmModel().commit_change( |
|
431 | commit = ScmModel().commit_change( | |
432 | repo=repo.scm_instance(), repo_name=repo.repo_name, |
|
432 | repo=repo.scm_instance(), repo_name=repo.repo_name, | |
433 | commit=parent, user=TEST_USER_ADMIN_LOGIN, |
|
433 | commit=parent, user=TEST_USER_ADMIN_LOGIN, | |
434 | author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN), |
|
434 | author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN), | |
435 | message=message, |
|
435 | message=message, | |
436 | content=content, |
|
436 | content=content, | |
437 | f_path=filename |
|
437 | f_path=filename | |
438 | ) |
|
438 | ) | |
439 | return commit |
|
439 | return commit | |
440 |
|
440 | |||
441 |
|
441 | |||
442 | def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None): |
|
442 | def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None): | |
443 | if not default: |
|
443 | if not default: | |
444 | raise ValueError('Permission for default user must be given') |
|
444 | raise ValueError('Permission for default user must be given') | |
445 | form_data = [( |
|
445 | form_data = [( | |
446 | 'csrf_token', csrf_token |
|
446 | 'csrf_token', csrf_token | |
447 | )] |
|
447 | )] | |
448 | # add default |
|
448 | # add default | |
449 | form_data.extend([ |
|
449 | form_data.extend([ | |
450 | ('u_perm_1', default) |
|
450 | ('u_perm_1', default) | |
451 | ]) |
|
451 | ]) | |
452 |
|
452 | |||
453 | if grant: |
|
453 | if grant: | |
454 | for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1): |
|
454 | for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1): | |
455 | form_data.extend([ |
|
455 | form_data.extend([ | |
456 | ('perm_new_member_perm_new{}'.format(cnt), perm), |
|
456 | ('perm_new_member_perm_new{}'.format(cnt), perm), | |
457 | ('perm_new_member_id_new{}'.format(cnt), obj_id), |
|
457 | ('perm_new_member_id_new{}'.format(cnt), obj_id), | |
458 | ('perm_new_member_name_new{}'.format(cnt), obj_name), |
|
458 | ('perm_new_member_name_new{}'.format(cnt), obj_name), | |
459 | ('perm_new_member_type_new{}'.format(cnt), obj_type), |
|
459 | ('perm_new_member_type_new{}'.format(cnt), obj_type), | |
460 |
|
460 | |||
461 | ]) |
|
461 | ]) | |
462 | if revoke: |
|
462 | if revoke: | |
463 | for obj_id, obj_type in revoke: |
|
463 | for obj_id, obj_type in revoke: | |
464 | form_data.extend([ |
|
464 | form_data.extend([ | |
465 | ('perm_del_member_id_{}'.format(obj_id), obj_id), |
|
465 | ('perm_del_member_id_{}'.format(obj_id), obj_id), | |
466 | ('perm_del_member_type_{}'.format(obj_id), obj_type), |
|
466 | ('perm_del_member_type_{}'.format(obj_id), obj_type), | |
467 | ]) |
|
467 | ]) | |
468 | return form_data |
|
468 | return form_data |
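Reviewer note: the import changes in this file follow the Python 3 relocation of the `urlparse` module into `urllib.parse`; the helper functions keep their names, so call sites such as `_Url.__init__` only need the new module path. A minimal sketch of the mapping (context only, not part of the diff):

    import urllib.parse

    # Python 2: from urlparse import urlparse, parse_qsl
    # Python 3: the same helpers live under urllib.parse.
    parts = urllib.parse.urlparse('https://example.com/path?x=1&y=2')
    pairs = urllib.parse.parse_qsl(parts.query)   # [('x', '1'), ('y', '2')]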