@@ -1,578 +1,580 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2010-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import urlparse

 import mock
 import pytest

 from rhodecode.tests import (
     assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN,
     no_newline_id_generator)
 from rhodecode.tests.fixture import Fixture
 from rhodecode.lib.auth import check_password
 from rhodecode.lib import helpers as h
 from rhodecode.model.auth_token import AuthTokenModel
 from rhodecode.model.db import User, Notification, UserApiKeys
 from rhodecode.model.meta import Session

 fixture = Fixture()

 whitelist_view = ['RepoCommitsView:repo_commit_raw']


 def route_path(name, params=None, **kwargs):
     import urllib
     from rhodecode.apps._base import ADMIN_PREFIX

     base_url = {
         'login': ADMIN_PREFIX + '/login',
         'logout': ADMIN_PREFIX + '/logout',
         'register': ADMIN_PREFIX + '/register',
         'reset_password':
             ADMIN_PREFIX + '/password_reset',
         'reset_password_confirmation':
             ADMIN_PREFIX + '/password_reset_confirmation',

         'admin_permissions_application':
             ADMIN_PREFIX + '/permissions/application',
         'admin_permissions_application_update':
             ADMIN_PREFIX + '/permissions/application/update',

         'repo_commit_raw': '/{repo_name}/raw-changeset/{commit_id}'

     }[name].format(**kwargs)

     if params:
         base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
     return base_url


 @pytest.mark.usefixtures('app')
 class TestLoginController(object):
     destroy_users = set()

     @classmethod
     def teardown_class(cls):
         fixture.destroy_users(cls.destroy_users)

     def teardown_method(self, method):
         for n in Notification.query().all():
             Session().delete(n)

         Session().commit()
         assert Notification.query().all() == []

     def test_index(self):
         response = self.app.get(route_path('login'))
         assert response.status == '200 OK'
         # Test response...

     def test_login_admin_ok(self):
         response = self.app.post(route_path('login'),
                                  {'username': 'test_admin',
                                   'password': 'test12'}, status=302)
         response = response.follow()
         session = response.get_session_from_response()
         username = session['rhodecode_user'].get('username')
         assert username == 'test_admin'
         response.mustcontain('logout')

     def test_login_regular_ok(self):
         response = self.app.post(route_path('login'),
                                  {'username': 'test_regular',
                                   'password': 'test12'}, status=302)

         response = response.follow()
         session = response.get_session_from_response()
         username = session['rhodecode_user'].get('username')
         assert username == 'test_regular'
         response.mustcontain('logout')

     def test_login_regular_forbidden_when_super_admin_restriction(self):
         from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin
-        with fixture.auth_restriction(RhodeCodeAuthPlugin.AUTH_RESTRICTION_SUPER_ADMIN):
+        with fixture.auth_restriction(self.app._pyramid_registry,
+                                      RhodeCodeAuthPlugin.AUTH_RESTRICTION_SUPER_ADMIN):
             response = self.app.post(route_path('login'),
                                      {'username': 'test_regular',
                                       'password': 'test12'})

         response.mustcontain('invalid user name')
         response.mustcontain('invalid password')

     def test_login_regular_forbidden_when_scope_restriction(self):
         from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin
-        with fixture.scope_restriction(RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_VCS):
+        with fixture.scope_restriction(self.app._pyramid_registry,
+                                       RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_VCS):
             response = self.app.post(route_path('login'),
                                      {'username': 'test_regular',
                                       'password': 'test12'})

         response.mustcontain('invalid user name')
         response.mustcontain('invalid password')

     def test_login_ok_came_from(self):
         test_came_from = '/_admin/users?branch=stable'
         _url = '{}?came_from={}'.format(route_path('login'), test_came_from)
         response = self.app.post(
             _url, {'username': 'test_admin', 'password': 'test12'}, status=302)

         assert 'branch=stable' in response.location
         response = response.follow()

         assert response.status == '200 OK'
         response.mustcontain('Users administration')

     def test_redirect_to_login_with_get_args(self):
         with fixture.anon_access(False):
             kwargs = {'branch': 'stable'}
             response = self.app.get(
                 h.route_path('repo_summary', repo_name=HG_REPO, _query=kwargs),
                 status=302)

             response_query = urlparse.parse_qsl(response.location)
             assert 'branch=stable' in response_query[0][1]

     def test_login_form_with_get_args(self):
         _url = '{}?came_from=/_admin/users,branch=stable'.format(route_path('login'))
         response = self.app.get(_url)
         assert 'branch%3Dstable' in response.form.action

     @pytest.mark.parametrize("url_came_from", [
         'data:text/html,<script>window.alert("xss")</script>',
         'mailto:test@rhodecode.org',
         'file:///etc/passwd',
         'ftp://some.ftp.server',
         'http://other.domain',
         '/\r\nX-Forwarded-Host: http://example.org',
     ], ids=no_newline_id_generator)
     def test_login_bad_came_froms(self, url_came_from):
         _url = '{}?came_from={}'.format(route_path('login'), url_came_from)
         response = self.app.post(
             _url,
             {'username': 'test_admin', 'password': 'test12'})
         assert response.status == '302 Found'
         response = response.follow()
         assert response.status == '200 OK'
         assert response.request.path == '/'

     def test_login_short_password(self):
         response = self.app.post(route_path('login'),
                                  {'username': 'test_admin',
                                   'password': 'as'})
         assert response.status == '200 OK'

         response.mustcontain('Enter 3 characters or more')

     def test_login_wrong_non_ascii_password(self, user_regular):
         response = self.app.post(
             route_path('login'),
             {'username': user_regular.username,
              'password': u'invalid-non-asci\xe4'.encode('utf8')})

         response.mustcontain('invalid user name')
         response.mustcontain('invalid password')

     def test_login_with_non_ascii_password(self, user_util):
         password = u'valid-non-ascii\xe4'
         user = user_util.create_user(password=password)
         response = self.app.post(
             route_path('login'),
             {'username': user.username,
              'password': password.encode('utf-8')})
         assert response.status_code == 302

     def test_login_wrong_username_password(self):
         response = self.app.post(route_path('login'),
                                  {'username': 'error',
                                   'password': 'test12'})

         response.mustcontain('invalid user name')
         response.mustcontain('invalid password')

     def test_login_admin_ok_password_migration(self, real_crypto_backend):
         from rhodecode.lib import auth

         # create new user, with sha256 password
         temp_user = 'test_admin_sha256'
         user = fixture.create_user(temp_user)
         user.password = auth._RhodeCodeCryptoSha256().hash_create(
             b'test123')
         Session().add(user)
         Session().commit()
         self.destroy_users.add(temp_user)
         response = self.app.post(route_path('login'),
                                  {'username': temp_user,
                                   'password': 'test123'}, status=302)

         response = response.follow()
         session = response.get_session_from_response()
         username = session['rhodecode_user'].get('username')
         assert username == temp_user
         response.mustcontain('logout')

         # new password should be bcrypted, after log-in and transfer
         user = User.get_by_username(temp_user)
         assert user.password.startswith('$')

     # REGISTRATIONS
     def test_register(self):
         response = self.app.get(route_path('register'))
         response.mustcontain('Create an Account')

     def test_register_err_same_username(self):
         uname = 'test_admin'
         response = self.app.post(
             route_path('register'),
             {
                 'username': uname,
                 'password': 'test12',
                 'password_confirmation': 'test12',
                 'email': 'goodmail@domain.com',
                 'firstname': 'test',
                 'lastname': 'test'
             }
         )

         assertr = response.assert_response()
         msg = 'Username "%(username)s" already exists'
         msg = msg % {'username': uname}
         assertr.element_contains('#username+.error-message', msg)

     def test_register_err_same_email(self):
         response = self.app.post(
             route_path('register'),
             {
                 'username': 'test_admin_0',
                 'password': 'test12',
                 'password_confirmation': 'test12',
                 'email': 'test_admin@mail.com',
                 'firstname': 'test',
                 'lastname': 'test'
             }
         )

         assertr = response.assert_response()
         msg = u'This e-mail address is already taken'
         assertr.element_contains('#email+.error-message', msg)

     def test_register_err_same_email_case_sensitive(self):
         response = self.app.post(
             route_path('register'),
             {
                 'username': 'test_admin_1',
                 'password': 'test12',
                 'password_confirmation': 'test12',
                 'email': 'TesT_Admin@mail.COM',
                 'firstname': 'test',
                 'lastname': 'test'
             }
         )
         assertr = response.assert_response()
         msg = u'This e-mail address is already taken'
         assertr.element_contains('#email+.error-message', msg)

     def test_register_err_wrong_data(self):
         response = self.app.post(
             route_path('register'),
             {
                 'username': 'xs',
                 'password': 'test',
                 'password_confirmation': 'test',
                 'email': 'goodmailm',
                 'firstname': 'test',
                 'lastname': 'test'
             }
         )
         assert response.status == '200 OK'
         response.mustcontain('An email address must contain a single @')
         response.mustcontain('Enter a value 6 characters long or more')

     def test_register_err_username(self):
         response = self.app.post(
             route_path('register'),
             {
                 'username': 'error user',
                 'password': 'test12',
                 'password_confirmation': 'test12',
                 'email': 'goodmailm',
                 'firstname': 'test',
                 'lastname': 'test'
             }
         )

         response.mustcontain('An email address must contain a single @')
         response.mustcontain(
             'Username may only contain '
             'alphanumeric characters underscores, '
             'periods or dashes and must begin with '
             'alphanumeric character')

     def test_register_err_case_sensitive(self):
         usr = 'Test_Admin'
         response = self.app.post(
             route_path('register'),
             {
                 'username': usr,
                 'password': 'test12',
                 'password_confirmation': 'test12',
                 'email': 'goodmailm',
                 'firstname': 'test',
                 'lastname': 'test'
             }
         )

         assertr = response.assert_response()
         msg = u'Username "%(username)s" already exists'
         msg = msg % {'username': usr}
         assertr.element_contains('#username+.error-message', msg)

     def test_register_special_chars(self):
         response = self.app.post(
             route_path('register'),
             {
                 'username': 'xxxaxn',
                 'password': 'ąćźżąśśśś',
                 'password_confirmation': 'ąćźżąśśśś',
                 'email': 'goodmailm@test.plx',
                 'firstname': 'test',
                 'lastname': 'test'
             }
         )

         msg = u'Invalid characters (non-ascii) in password'
         response.mustcontain(msg)

     def test_register_password_mismatch(self):
         response = self.app.post(
             route_path('register'),
             {
                 'username': 'xs',
                 'password': '123qwe',
                 'password_confirmation': 'qwe123',
                 'email': 'goodmailm@test.plxa',
                 'firstname': 'test',
                 'lastname': 'test'
             }
         )
         msg = u'Passwords do not match'
         response.mustcontain(msg)

     def test_register_ok(self):
         username = 'test_regular4'
         password = 'qweqwe'
         email = 'marcin@test.com'
         name = 'testname'
         lastname = 'testlastname'

         # this initializes a session
         response = self.app.get(route_path('register'))
         response.mustcontain('Create an Account')


         response = self.app.post(
             route_path('register'),
             {
                 'username': username,
                 'password': password,
                 'password_confirmation': password,
                 'email': email,
                 'firstname': name,
                 'lastname': lastname,
                 'admin': True
             },
             status=302
         )  # This should be overridden

         assert_session_flash(
             response, 'You have successfully registered with RhodeCode. You can log-in now.')

         ret = Session().query(User).filter(
             User.username == 'test_regular4').one()
         assert ret.username == username
         assert check_password(password, ret.password)
         assert ret.email == email
         assert ret.name == name
         assert ret.lastname == lastname
         assert ret.auth_tokens is not None
         assert not ret.admin

     def test_forgot_password_wrong_mail(self):
         bad_email = 'marcin@wrongmail.org'
         # this initializes a session
         self.app.get(route_path('reset_password'))

         response = self.app.post(
             route_path('reset_password'), {'email': bad_email, }
         )
         assert_session_flash(response,
             'If such email exists, a password reset link was sent to it.')

     def test_forgot_password(self, user_util):
         # this initializes a session
         self.app.get(route_path('reset_password'))

         user = user_util.create_user()
         user_id = user.user_id
         email = user.email

         response = self.app.post(route_path('reset_password'), {'email': email, })

         assert_session_flash(response,
             'If such email exists, a password reset link was sent to it.')

         # BAD KEY
         confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), 'badkey')
         response = self.app.get(confirm_url, status=302)
         assert response.location.endswith(route_path('reset_password'))
         assert_session_flash(response, 'Given reset token is invalid')

         response.follow()  # cleanup flash

         # GOOD KEY
         key = UserApiKeys.query()\
             .filter(UserApiKeys.user_id == user_id)\
             .filter(UserApiKeys.role == UserApiKeys.ROLE_PASSWORD_RESET)\
             .first()

         assert key

         confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), key.api_key)
         response = self.app.get(confirm_url)
         assert response.status == '302 Found'
         assert response.location.endswith(route_path('login'))

         assert_session_flash(
             response,
             'Your password reset was successful, '
             'a new password has been sent to your email')

         response.follow()

     def _get_api_whitelist(self, values=None):
         config = {'api_access_controllers_whitelist': values or []}
         return config

     @pytest.mark.parametrize("test_name, auth_token", [
         ('none', None),
         ('empty_string', ''),
         ('fake_number', '123456'),
         ('proper_auth_token', None)
     ])
     def test_access_not_whitelisted_page_via_auth_token(
             self, test_name, auth_token, user_admin):

         whitelist = self._get_api_whitelist([])
         with mock.patch.dict('rhodecode.CONFIG', whitelist):
             assert [] == whitelist['api_access_controllers_whitelist']
             if test_name == 'proper_auth_token':
                 # use builtin if api_key is None
                 auth_token = user_admin.api_key

             with fixture.anon_access(False):
                 self.app.get(
                     route_path('repo_commit_raw',
                                repo_name=HG_REPO, commit_id='tip',
                                params=dict(api_key=auth_token)),
                     status=302)

     @pytest.mark.parametrize("test_name, auth_token, code", [
         ('none', None, 302),
         ('empty_string', '', 302),
         ('fake_number', '123456', 302),
         ('proper_auth_token', None, 200)
     ])
     def test_access_whitelisted_page_via_auth_token(
             self, test_name, auth_token, code, user_admin):

         whitelist = self._get_api_whitelist(whitelist_view)

         with mock.patch.dict('rhodecode.CONFIG', whitelist):
             assert whitelist_view == whitelist['api_access_controllers_whitelist']

             if test_name == 'proper_auth_token':
                 auth_token = user_admin.api_key
                 assert auth_token

             with fixture.anon_access(False):
                 self.app.get(
                     route_path('repo_commit_raw',
                                repo_name=HG_REPO, commit_id='tip',
                                params=dict(api_key=auth_token)),
                     status=code)

     @pytest.mark.parametrize("test_name, auth_token, code", [
         ('proper_auth_token', None, 200),
         ('wrong_auth_token', '123456', 302),
     ])
     def test_access_whitelisted_page_via_auth_token_bound_to_token(
             self, test_name, auth_token, code, user_admin):

         expected_token = auth_token
         if test_name == 'proper_auth_token':
             auth_token = user_admin.api_key
             expected_token = auth_token
             assert auth_token

         whitelist = self._get_api_whitelist([
             'RepoCommitsView:repo_commit_raw@{}'.format(expected_token)])

         with mock.patch.dict('rhodecode.CONFIG', whitelist):

             with fixture.anon_access(False):
                 self.app.get(
                     route_path('repo_commit_raw',
                                repo_name=HG_REPO, commit_id='tip',
                                params=dict(api_key=auth_token)),
                     status=code)

     def test_access_page_via_extra_auth_token(self):
         whitelist = self._get_api_whitelist(whitelist_view)
         with mock.patch.dict('rhodecode.CONFIG', whitelist):
             assert whitelist_view == \
                 whitelist['api_access_controllers_whitelist']

             new_auth_token = AuthTokenModel().create(
                 TEST_USER_ADMIN_LOGIN, 'test')
             Session().commit()
             with fixture.anon_access(False):
                 self.app.get(
                     route_path('repo_commit_raw',
                                repo_name=HG_REPO, commit_id='tip',
                                params=dict(api_key=new_auth_token.api_key)),
                     status=200)

     def test_access_page_via_expired_auth_token(self):
         whitelist = self._get_api_whitelist(whitelist_view)
         with mock.patch.dict('rhodecode.CONFIG', whitelist):
             assert whitelist_view == \
                 whitelist['api_access_controllers_whitelist']

             new_auth_token = AuthTokenModel().create(
                 TEST_USER_ADMIN_LOGIN, 'test')
             Session().commit()
             # patch the api key and make it expired
             new_auth_token.expires = 0
             Session().add(new_auth_token)
             Session().commit()
             with fixture.anon_access(False):
                 self.app.get(
                     route_path('repo_commit_raw',
                                repo_name=HG_REPO, commit_id='tip',
                                params=dict(api_key=new_auth_token.api_key)),
                     status=302)
@@ -1,797 +1,808 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Authentication modules |
|
22 | Authentication modules | |
23 | """ |
|
23 | """ | |
24 | import socket |
|
24 | import socket | |
25 | import string |
|
25 | import string | |
26 | import colander |
|
26 | import colander | |
27 | import copy |
|
27 | import copy | |
28 | import logging |
|
28 | import logging | |
29 | import time |
|
29 | import time | |
30 | import traceback |
|
30 | import traceback | |
31 | import warnings |
|
31 | import warnings | |
32 | import functools |
|
32 | import functools | |
33 |
|
33 | |||
34 | from pyramid.threadlocal import get_current_registry |
|
34 | from pyramid.threadlocal import get_current_registry | |
35 |
|
35 | |||
36 | from rhodecode.authentication.interface import IAuthnPluginRegistry |
|
36 | from rhodecode.authentication.interface import IAuthnPluginRegistry | |
37 | from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase |
|
37 | from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase | |
38 | from rhodecode.lib import rc_cache |
|
38 | from rhodecode.lib import rc_cache | |
39 | from rhodecode.lib.auth import PasswordGenerator, _RhodeCodeCryptoBCrypt |
|
39 | from rhodecode.lib.auth import PasswordGenerator, _RhodeCodeCryptoBCrypt | |
40 | from rhodecode.lib.utils2 import safe_int, safe_str |
|
40 | from rhodecode.lib.utils2 import safe_int, safe_str | |
41 | from rhodecode.lib.exceptions import LdapConnectionError, LdapUsernameError, \ |
|
41 | from rhodecode.lib.exceptions import LdapConnectionError, LdapUsernameError, \ | |
42 | LdapPasswordError |
|
42 | LdapPasswordError | |
43 | from rhodecode.model.db import User |
|
43 | from rhodecode.model.db import User | |
44 | from rhodecode.model.meta import Session |
|
44 | from rhodecode.model.meta import Session | |
45 | from rhodecode.model.settings import SettingsModel |
|
45 | from rhodecode.model.settings import SettingsModel | |
46 | from rhodecode.model.user import UserModel |
|
46 | from rhodecode.model.user import UserModel | |
47 | from rhodecode.model.user_group import UserGroupModel |
|
47 | from rhodecode.model.user_group import UserGroupModel | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | log = logging.getLogger(__name__) |
|
50 | log = logging.getLogger(__name__) | |
51 |
|
51 | |||
52 | # auth types that authenticate() function can receive |
|
52 | # auth types that authenticate() function can receive | |
53 | VCS_TYPE = 'vcs' |
|
53 | VCS_TYPE = 'vcs' | |
54 | HTTP_TYPE = 'http' |
|
54 | HTTP_TYPE = 'http' | |
55 |
|
55 | |||
56 | external_auth_session_key = 'rhodecode.external_auth' |
|
56 | external_auth_session_key = 'rhodecode.external_auth' | |
57 |
|
57 | |||
58 |
|
58 | |||
59 | class hybrid_property(object): |
|
59 | class hybrid_property(object): | |
60 | """ |
|
60 | """ | |
61 | a property decorator that works both for instance and class |
|
61 | a property decorator that works both for instance and class | |
62 | """ |
|
62 | """ | |
63 | def __init__(self, fget, fset=None, fdel=None, expr=None): |
|
63 | def __init__(self, fget, fset=None, fdel=None, expr=None): | |
64 | self.fget = fget |
|
64 | self.fget = fget | |
65 | self.fset = fset |
|
65 | self.fset = fset | |
66 | self.fdel = fdel |
|
66 | self.fdel = fdel | |
67 | self.expr = expr or fget |
|
67 | self.expr = expr or fget | |
68 | functools.update_wrapper(self, fget) |
|
68 | functools.update_wrapper(self, fget) | |
69 |
|
69 | |||
70 | def __get__(self, instance, owner): |
|
70 | def __get__(self, instance, owner): | |
71 | if instance is None: |
|
71 | if instance is None: | |
72 | return self.expr(owner) |
|
72 | return self.expr(owner) | |
73 | else: |
|
73 | else: | |
74 | return self.fget(instance) |
|
74 | return self.fget(instance) | |
75 |
|
75 | |||
76 | def __set__(self, instance, value): |
|
76 | def __set__(self, instance, value): | |
77 | self.fset(instance, value) |
|
77 | self.fset(instance, value) | |
78 |
|
78 | |||
79 | def __delete__(self, instance): |
|
79 | def __delete__(self, instance): | |
80 | self.fdel(instance) |
|
80 | self.fdel(instance) | |
81 |
|
81 | |||
82 |
|
82 | |||
83 | class LazyFormencode(object): |
|
83 | class LazyFormencode(object): | |
84 | def __init__(self, formencode_obj, *args, **kwargs): |
|
84 | def __init__(self, formencode_obj, *args, **kwargs): | |
85 | self.formencode_obj = formencode_obj |
|
85 | self.formencode_obj = formencode_obj | |
86 | self.args = args |
|
86 | self.args = args | |
87 | self.kwargs = kwargs |
|
87 | self.kwargs = kwargs | |
88 |
|
88 | |||
89 | def __call__(self, *args, **kwargs): |
|
89 | def __call__(self, *args, **kwargs): | |
90 | from inspect import isfunction |
|
90 | from inspect import isfunction | |
91 | formencode_obj = self.formencode_obj |
|
91 | formencode_obj = self.formencode_obj | |
92 | if isfunction(formencode_obj): |
|
92 | if isfunction(formencode_obj): | |
93 | # case we wrap validators into functions |
|
93 | # case we wrap validators into functions | |
94 | formencode_obj = self.formencode_obj(*args, **kwargs) |
|
94 | formencode_obj = self.formencode_obj(*args, **kwargs) | |
95 | return formencode_obj(*self.args, **self.kwargs) |
|
95 | return formencode_obj(*self.args, **self.kwargs) | |
96 |
|
96 | |||
97 |
|
97 | |||
98 | class RhodeCodeAuthPluginBase(object): |
|
98 | class RhodeCodeAuthPluginBase(object): | |
99 | # UID is used to register plugin to the registry |
|
99 | # UID is used to register plugin to the registry | |
100 | uid = None |
|
100 | uid = None | |
101 |
|
101 | |||
102 | # cache the authentication request for N amount of seconds. Some kind |
|
102 | # cache the authentication request for N amount of seconds. Some kind | |
103 | # of authentication methods are very heavy and it's very efficient to cache |
|
103 | # of authentication methods are very heavy and it's very efficient to cache | |
104 | # the result of a call. If it's set to None (default) cache is off |
|
104 | # the result of a call. If it's set to None (default) cache is off | |
105 | AUTH_CACHE_TTL = None |
|
105 | AUTH_CACHE_TTL = None | |
106 | AUTH_CACHE = {} |
|
106 | AUTH_CACHE = {} | |
107 |
|
107 | |||
108 | auth_func_attrs = { |
|
108 | auth_func_attrs = { | |
109 | "username": "unique username", |
|
109 | "username": "unique username", | |
110 | "firstname": "first name", |
|
110 | "firstname": "first name", | |
111 | "lastname": "last name", |
|
111 | "lastname": "last name", | |
112 | "email": "email address", |
|
112 | "email": "email address", | |
113 | "groups": '["list", "of", "groups"]', |
|
113 | "groups": '["list", "of", "groups"]', | |
114 | "user_group_sync": |
|
114 | "user_group_sync": | |
115 | 'True|False defines if returned user groups should be synced', |
|
115 | 'True|False defines if returned user groups should be synced', | |
116 | "extern_name": "name in external source of record", |
|
116 | "extern_name": "name in external source of record", | |
117 | "extern_type": "type of external source of record", |
|
117 | "extern_type": "type of external source of record", | |
118 | "admin": 'True|False defines if user should be RhodeCode super admin', |
|
118 | "admin": 'True|False defines if user should be RhodeCode super admin', | |
119 | "active": |
|
119 | "active": | |
120 | 'True|False defines active state of user internally for RhodeCode', |
|
120 | 'True|False defines active state of user internally for RhodeCode', | |
121 | "active_from_extern": |
|
121 | "active_from_extern": | |
122 | "True|False|None, active state from the external auth, " |
|
122 | "True|False|None, active state from the external auth, " | |
123 | "None means use definition from RhodeCode extern_type active value" |
|
123 | "None means use definition from RhodeCode extern_type active value" | |
124 |
|
124 | |||
125 | } |
|
125 | } | |
126 | # set on authenticate() method and via set_auth_type func. |
|
126 | # set on authenticate() method and via set_auth_type func. | |
127 | auth_type = None |
|
127 | auth_type = None | |
128 |
|
128 | |||
129 | # set on authenticate() method and via set_calling_scope_repo, this is a |
|
129 | # set on authenticate() method and via set_calling_scope_repo, this is a | |
130 | # calling scope repository when doing authentication most likely on VCS |
|
130 | # calling scope repository when doing authentication most likely on VCS | |
131 | # operations |
|
131 | # operations | |
132 | acl_repo_name = None |
|
132 | acl_repo_name = None | |
133 |
|
133 | |||
134 | # List of setting names to store encrypted. Plugins may override this list |
|
134 | # List of setting names to store encrypted. Plugins may override this list | |
135 | # to store settings encrypted. |
|
135 | # to store settings encrypted. | |
136 | _settings_encrypted = [] |
|
136 | _settings_encrypted = [] | |
137 |
|
137 | |||
138 | # Mapping of python to DB settings model types. Plugins may override or |
|
138 | # Mapping of python to DB settings model types. Plugins may override or | |
139 | # extend this mapping. |
|
139 | # extend this mapping. | |
140 | _settings_type_map = { |
|
140 | _settings_type_map = { | |
141 | colander.String: 'unicode', |
|
141 | colander.String: 'unicode', | |
142 | colander.Integer: 'int', |
|
142 | colander.Integer: 'int', | |
143 | colander.Boolean: 'bool', |
|
143 | colander.Boolean: 'bool', | |
144 | colander.List: 'list', |
|
144 | colander.List: 'list', | |
145 | } |
|
145 | } | |
146 |
|
146 | |||
147 | # list of keys in settings that are unsafe to be logged, should be passwords |
|
147 | # list of keys in settings that are unsafe to be logged, should be passwords | |
148 | # or other crucial credentials |
|
148 | # or other crucial credentials | |
149 | _settings_unsafe_keys = [] |
|
149 | _settings_unsafe_keys = [] | |
150 |
|
150 | |||
151 | def __init__(self, plugin_id): |
|
151 | def __init__(self, plugin_id): | |
152 | self._plugin_id = plugin_id |
|
152 | self._plugin_id = plugin_id | |
|
153 | self._settings = {} | |||
153 |
|
154 | |||
154 | def __str__(self): |
|
155 | def __str__(self): | |
155 | return self.get_id() |
|
156 | return self.get_id() | |
156 |
|
157 | |||
157 | def _get_setting_full_name(self, name): |
|
158 | def _get_setting_full_name(self, name): | |
158 | """ |
|
159 | """ | |
159 | Return the full setting name used for storing values in the database. |
|
160 | Return the full setting name used for storing values in the database. | |
160 | """ |
|
161 | """ | |
161 | # TODO: johbo: Using the name here is problematic. It would be good to |
|
162 | # TODO: johbo: Using the name here is problematic. It would be good to | |
162 | # introduce either new models in the database to hold Plugin and |
|
163 | # introduce either new models in the database to hold Plugin and | |
163 | # PluginSetting or to use the plugin id here. |
|
164 | # PluginSetting or to use the plugin id here. | |
164 | return 'auth_{}_{}'.format(self.name, name) |
|
165 | return 'auth_{}_{}'.format(self.name, name) | |
165 |
|
166 | |||
166 | def _get_setting_type(self, name): |
|
167 | def _get_setting_type(self, name): | |
167 | """ |
|
168 | """ | |
168 | Return the type of a setting. This type is defined by the SettingsModel |
|
169 | Return the type of a setting. This type is defined by the SettingsModel | |
169 | and determines how the setting is stored in DB. Optionally the suffix |
|
170 | and determines how the setting is stored in DB. Optionally the suffix | |
170 | `.encrypted` is appended to instruct SettingsModel to store it |
|
171 | `.encrypted` is appended to instruct SettingsModel to store it | |
171 | encrypted. |
|
172 | encrypted. | |
172 | """ |
|
173 | """ | |
173 | schema_node = self.get_settings_schema().get(name) |
|
174 | schema_node = self.get_settings_schema().get(name) | |
174 | db_type = self._settings_type_map.get( |
|
175 | db_type = self._settings_type_map.get( | |
175 | type(schema_node.typ), 'unicode') |
|
176 | type(schema_node.typ), 'unicode') | |
176 | if name in self._settings_encrypted: |
|
177 | if name in self._settings_encrypted: | |
177 | db_type = '{}.encrypted'.format(db_type) |
|
178 | db_type = '{}.encrypted'.format(db_type) | |
178 | return db_type |
|
179 | return db_type | |
179 |
|
180 | |||
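
A minimal sketch of how the two settings helpers above compose, assuming a hypothetical plugin whose `name` is 'ldap' and whose schema defines String nodes 'host' and 'password'; the class, plugin id and setting names below are illustrative only and rely on the imports already present in this module:

    import colander

    class SketchLdapPlugin(RhodeCodeAuthPluginBase):
        # values for 'password' get the `.encrypted` suffix and are stored encrypted
        _settings_encrypted = ['password']

        @hybrid_property
        def name(self):
            return 'ldap'

        def get_settings_schema(self):
            schema = colander.SchemaNode(colander.Mapping())
            schema.add(colander.SchemaNode(colander.String(), name='host'))
            schema.add(colander.SchemaNode(colander.String(), name='password'))
            return schema

    plugin = SketchLdapPlugin('egg:example#ldap')
    plugin._get_setting_full_name('host')      # -> 'auth_ldap_host'
    plugin._get_setting_type('host')           # -> 'unicode'
    plugin._get_setting_type('password')       # -> 'unicode.encrypted'
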
180 | @classmethod |
|
181 | @classmethod | |
181 | def docs(cls): |
|
182 | def docs(cls): | |
182 | """ |
|
183 | """ | |
183 | Defines a documentation URL which helps with plugin setup |
|
184 | Defines a documentation URL which helps with plugin setup | |
184 | """ |
|
185 | """ | |
185 | return '' |
|
186 | return '' | |
186 |
|
187 | |||
187 | @classmethod |
|
188 | @classmethod | |
188 | def icon(cls): |
|
189 | def icon(cls): | |
189 | """ |
|
190 | """ | |
190 | Defines ICON in SVG format for authentication method |
|
191 | Defines ICON in SVG format for authentication method | |
191 | """ |
|
192 | """ | |
192 | return '' |
|
193 | return '' | |
193 |
|
194 | |||
194 | def is_enabled(self): |
|
195 | def is_enabled(self): | |
195 | """ |
|
196 | """ | |
196 | Returns true if this plugin is enabled. An enabled plugin can be |
|
197 | Returns true if this plugin is enabled. An enabled plugin can be | |
197 | configured in the admin interface but it is not consulted during |
|
198 | configured in the admin interface but it is not consulted during | |
198 | authentication. |
|
199 | authentication. | |
199 | """ |
|
200 | """ | |
200 | auth_plugins = SettingsModel().get_auth_plugins() |
|
201 | auth_plugins = SettingsModel().get_auth_plugins() | |
201 | return self.get_id() in auth_plugins |
|
202 | return self.get_id() in auth_plugins | |
202 |
|
203 | |||
203 | def is_active(self, plugin_cached_settings=None): |
|
204 | def is_active(self, plugin_cached_settings=None): | |
204 | """ |
|
205 | """ | |
205 | Returns true if the plugin is activated. An activated plugin is |
|
206 | Returns true if the plugin is activated. An activated plugin is | |
206 | consulted during authentication, assuming it is also enabled. |
|
207 | consulted during authentication, assuming it is also enabled. | |
207 | """ |
|
208 | """ | |
208 | return self.get_setting_by_name( |
|
209 | return self.get_setting_by_name( | |
209 | 'enabled', plugin_cached_settings=plugin_cached_settings) |
|
210 | 'enabled', plugin_cached_settings=plugin_cached_settings) | |
210 |
|
211 | |||
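
A short illustration of the enabled/active distinction above; the plugin id is made up. `is_enabled()` checks that the id is listed in the `auth_plugins` DB setting, while `is_active()` reads the plugin's own `enabled` setting:

    plugin = loadplugin('egg:example#headers')       # hypothetical plugin id
    if plugin and plugin.is_enabled() and plugin.is_active():
        # listed in `auth_plugins` *and* its own `enabled` flag is set,
        # so the authentication chain will consult it
        pass
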
211 | def get_id(self): |
|
212 | def get_id(self): | |
212 | """ |
|
213 | """ | |
213 | Returns the plugin id. |
|
214 | Returns the plugin id. | |
214 | """ |
|
215 | """ | |
215 | return self._plugin_id |
|
216 | return self._plugin_id | |
216 |
|
217 | |||
217 | def get_display_name(self): |
|
218 | def get_display_name(self): | |
218 | """ |
|
219 | """ | |
219 | Returns a translation string for display purposes. |
|
220 | Returns a translation string for display purposes. | |
220 | """ |
|
221 | """ | |
221 | raise NotImplementedError('Not implemented in base class') |
|
222 | raise NotImplementedError('Not implemented in base class') | |
222 |
|
223 | |||
223 | def get_settings_schema(self): |
|
224 | def get_settings_schema(self): | |
224 | """ |
|
225 | """ | |
225 | Returns a colander schema, representing the plugin settings. |
|
226 | Returns a colander schema, representing the plugin settings. | |
226 | """ |
|
227 | """ | |
227 | return AuthnPluginSettingsSchemaBase() |
|
228 | return AuthnPluginSettingsSchemaBase() | |
228 |
|
229 | |||
229 |
def |
|
230 | def _propagate_settings(self, raw_settings): | |
230 | """ |
|
|||
231 | Returns the plugin settings as dictionary. |
|
|||
232 | """ |
|
|||
233 | settings = {} |
|
231 | settings = {} | |
234 | raw_settings = SettingsModel().get_all_settings() |
|
|||
235 | for node in self.get_settings_schema(): |
|
232 | for node in self.get_settings_schema(): | |
236 | settings[node.name] = self.get_setting_by_name( |
|
233 | settings[node.name] = self.get_setting_by_name( | |
237 | node.name, plugin_cached_settings=raw_settings) |
|
234 | node.name, plugin_cached_settings=raw_settings) | |
238 | return settings |
|
235 | return settings | |
239 |
|
236 | |||
|
237 | def get_settings(self, use_cache=True): | |||
|
238 | """ | |||
|
239 | Returns the plugin settings as dictionary. | |||
|
240 | """ | |||
|
241 | if self._settings != {} and use_cache: | |||
|
242 | return self._settings | |||
|
243 | ||||
|
244 | raw_settings = SettingsModel().get_all_settings() | |||
|
245 | settings = self._propagate_settings(raw_settings) | |||
|
246 | ||||
|
247 | self._settings = settings | |||
|
248 | return self._settings | |||
|
249 | ||||
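
A sketch of the intended call pattern for the caching variant above, assuming `plugin` is an instantiated subclass:

    settings = plugin.get_settings()                 # resolved from the DB, cached on the instance
    settings = plugin.get_settings()                 # served from self._settings, no DB round trip
    settings = plugin.get_settings(use_cache=False)  # force a fresh read after settings change
    # note: an empty result is never treated as cached, so the DB is queried again
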
240 | def get_setting_by_name(self, name, default=None, plugin_cached_settings=None): |
|
250 | def get_setting_by_name(self, name, default=None, plugin_cached_settings=None): | |
241 | """ |
|
251 | """ | |
242 | Returns a plugin setting by name. |
|
252 | Returns a plugin setting by name. | |
243 | """ |
|
253 | """ | |
244 | full_name = 'rhodecode_{}'.format(self._get_setting_full_name(name)) |
|
254 | full_name = 'rhodecode_{}'.format(self._get_setting_full_name(name)) | |
245 | if plugin_cached_settings: |
|
255 | if plugin_cached_settings: | |
246 | plugin_settings = plugin_cached_settings |
|
256 | plugin_settings = plugin_cached_settings | |
247 | else: |
|
257 | else: | |
248 | plugin_settings = SettingsModel().get_all_settings() |
|
258 | plugin_settings = SettingsModel().get_all_settings() | |
249 |
|
259 | |||
250 | if full_name in plugin_settings: |
|
260 | if full_name in plugin_settings: | |
251 | return plugin_settings[full_name] |
|
261 | return plugin_settings[full_name] | |
252 | else: |
|
262 | else: | |
253 | return default |
|
263 | return default | |
254 |
|
264 | |||
255 | def create_or_update_setting(self, name, value): |
|
265 | def create_or_update_setting(self, name, value): | |
256 | """ |
|
266 | """ | |
257 | Create or update a setting for this plugin in the persistent storage. |
|
267 | Create or update a setting for this plugin in the persistent storage. | |
258 | """ |
|
268 | """ | |
259 | full_name = self._get_setting_full_name(name) |
|
269 | full_name = self._get_setting_full_name(name) | |
260 | type_ = self._get_setting_type(name) |
|
270 | type_ = self._get_setting_type(name) | |
261 | db_setting = SettingsModel().create_or_update_setting( |
|
271 | db_setting = SettingsModel().create_or_update_setting( | |
262 | full_name, value, type_) |
|
272 | full_name, value, type_) | |
263 | return db_setting.app_settings_value |
|
273 | return db_setting.app_settings_value | |
264 |
|
274 | |||
265 | def log_safe_settings(self, settings): |
|
275 | def log_safe_settings(self, settings): | |
266 | """ |
|
276 | """ | |
267 | returns a log safe representation of settings, without any secrets |
|
277 | returns a log safe representation of settings, without any secrets | |
268 | """ |
|
278 | """ | |
269 | settings_copy = copy.deepcopy(settings) |
|
279 | settings_copy = copy.deepcopy(settings) | |
270 | for k in self._settings_unsafe_keys: |
|
280 | for k in self._settings_unsafe_keys: | |
271 | if k in settings_copy: |
|
281 | if k in settings_copy: | |
272 | del settings_copy[k] |
|
282 | del settings_copy[k] | |
273 | return settings_copy |
|
283 | return settings_copy | |
274 |
|
284 | |||
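
A small sketch of the unsafe-key filtering above; the class name and setting values are made up:

    class SketchPlugin(RhodeCodeAuthPluginBase):
        _settings_unsafe_keys = ['password']

    plugin = SketchPlugin('egg:example#sketch')
    plugin.log_safe_settings({'host': 'ldap.example.com', 'password': 's3cret'})
    # -> {'host': 'ldap.example.com'}  ('password' is dropped before logging)
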
275 | @hybrid_property |
|
285 | @hybrid_property | |
276 | def name(self): |
|
286 | def name(self): | |
277 | """ |
|
287 | """ | |
278 | Returns the name of this authentication plugin. |
|
288 | Returns the name of this authentication plugin. | |
279 |
|
289 | |||
280 | :returns: string |
|
290 | :returns: string | |
281 | """ |
|
291 | """ | |
282 | raise NotImplementedError("Not implemented in base class") |
|
292 | raise NotImplementedError("Not implemented in base class") | |
283 |
|
293 | |||
284 | def get_url_slug(self): |
|
294 | def get_url_slug(self): | |
285 | """ |
|
295 | """ | |
286 | Returns a slug which should be used when constructing URLs which refer |
|
296 | Returns a slug which should be used when constructing URLs which refer | |
287 | to this plugin. By default it returns the plugin name. If the name is |
|
297 | to this plugin. By default it returns the plugin name. If the name is | |
288 | not suitable for use in a URL, the plugin should override this |
|
298 | not suitable for use in a URL, the plugin should override this | |
289 | method. |
|
299 | method. | |
290 | """ |
|
300 | """ | |
291 | return self.name |
|
301 | return self.name | |
292 |
|
302 | |||
293 | @property |
|
303 | @property | |
294 | def is_headers_auth(self): |
|
304 | def is_headers_auth(self): | |
295 | """ |
|
305 | """ | |
296 | Returns True if this authentication plugin uses HTTP headers as |
|
306 | Returns True if this authentication plugin uses HTTP headers as | |
297 | authentication method. |
|
307 | authentication method. | |
298 | """ |
|
308 | """ | |
299 | return False |
|
309 | return False | |
300 |
|
310 | |||
301 | @hybrid_property |
|
311 | @hybrid_property | |
302 | def is_container_auth(self): |
|
312 | def is_container_auth(self): | |
303 | """ |
|
313 | """ | |
304 | Deprecated method that indicates if this authentication plugin uses |
|
314 | Deprecated method that indicates if this authentication plugin uses | |
305 | HTTP headers as authentication method. |
|
315 | HTTP headers as authentication method. | |
306 | """ |
|
316 | """ | |
307 | warnings.warn( |
|
317 | warnings.warn( | |
308 | 'Use is_headers_auth instead.', category=DeprecationWarning) |
|
318 | 'Use is_headers_auth instead.', category=DeprecationWarning) | |
309 | return self.is_headers_auth |
|
319 | return self.is_headers_auth | |
310 |
|
320 | |||
311 | @hybrid_property |
|
321 | @hybrid_property | |
312 | def allows_creating_users(self): |
|
322 | def allows_creating_users(self): | |
313 | """ |
|
323 | """ | |
314 | Defines if Plugin allows users to be created on-the-fly when |
|
324 | Defines if Plugin allows users to be created on-the-fly when | |
315 | authentication is called. Controls how external plugins should behave |
|
325 | authentication is called. Controls how external plugins should behave | |
316 | in terms of whether they are allowed to create new users. Base plugins |
|
326 | in terms of whether they are allowed to create new users. Base plugins | |
317 | should not be allowed to, but external ones should be. |
|
327 | should not be allowed to, but external ones should be. | |
318 |
|
328 | |||
319 | :return: bool |
|
329 | :return: bool | |
320 | """ |
|
330 | """ | |
321 | return False |
|
331 | return False | |
322 |
|
332 | |||
323 | def set_auth_type(self, auth_type): |
|
333 | def set_auth_type(self, auth_type): | |
324 | self.auth_type = auth_type |
|
334 | self.auth_type = auth_type | |
325 |
|
335 | |||
326 | def set_calling_scope_repo(self, acl_repo_name): |
|
336 | def set_calling_scope_repo(self, acl_repo_name): | |
327 | self.acl_repo_name = acl_repo_name |
|
337 | self.acl_repo_name = acl_repo_name | |
328 |
|
338 | |||
329 | def allows_authentication_from( |
|
339 | def allows_authentication_from( | |
330 | self, user, allows_non_existing_user=True, |
|
340 | self, user, allows_non_existing_user=True, | |
331 | allowed_auth_plugins=None, allowed_auth_sources=None): |
|
341 | allowed_auth_plugins=None, allowed_auth_sources=None): | |
332 | """ |
|
342 | """ | |
333 | Checks if this authentication module should accept a request for |
|
343 | Checks if this authentication module should accept a request for | |
334 | the current user. |
|
344 | the current user. | |
335 |
|
345 | |||
336 | :param user: user object fetched using plugin's get_user() method. |
|
346 | :param user: user object fetched using plugin's get_user() method. | |
337 | :param allows_non_existing_user: if False, don't allow the |
|
347 | :param allows_non_existing_user: if False, don't allow the | |
338 | user to be empty, meaning not existing in our database |
|
348 | user to be empty, meaning not existing in our database | |
339 | :param allowed_auth_plugins: if provided, the user's extern_type will be |
|
349 | :param allowed_auth_plugins: if provided, the user's extern_type will be | |
340 | checked against a list of provided extern types, which are plugin |
|
350 | checked against a list of provided extern types, which are plugin | |
341 | auth_names in the end |
|
351 | auth_names in the end | |
342 | :param allowed_auth_sources: authentication type allowed, |
|
352 | :param allowed_auth_sources: authentication type allowed, | |
343 | `http` or `vcs`; default is both. |
|
353 | `http` or `vcs`; default is both. | |
344 | Defines if the plugin will accept only http authentication, vcs |
|
354 | Defines if the plugin will accept only http authentication, vcs | |
345 | authentication (git/hg), or both |
|
355 | authentication (git/hg), or both | |
346 | :returns: boolean |
|
356 | :returns: boolean | |
347 | """ |
|
357 | """ | |
348 | if not user and not allows_non_existing_user: |
|
358 | if not user and not allows_non_existing_user: | |
349 | log.debug('User is empty but plugin does not allow empty users, ' |
|
359 | log.debug('User is empty but plugin does not allow empty users, ' | |
350 | 'not allowed to authenticate') |
|
360 | 'not allowed to authenticate') | |
351 | return False |
|
361 | return False | |
352 |
|
362 | |||
353 | expected_auth_plugins = allowed_auth_plugins or [self.name] |
|
363 | expected_auth_plugins = allowed_auth_plugins or [self.name] | |
354 | if user and (user.extern_type and |
|
364 | if user and (user.extern_type and | |
355 | user.extern_type not in expected_auth_plugins): |
|
365 | user.extern_type not in expected_auth_plugins): | |
356 | log.debug( |
|
366 | log.debug( | |
357 | 'User `%s` is bound to `%s` auth type. Plugin allows only ' |
|
367 | 'User `%s` is bound to `%s` auth type. Plugin allows only ' | |
358 | '%s, skipping', user, user.extern_type, expected_auth_plugins) |
|
368 | '%s, skipping', user, user.extern_type, expected_auth_plugins) | |
359 |
|
369 | |||
360 | return False |
|
370 | return False | |
361 |
|
371 | |||
362 | # by default accept both |
|
372 | # by default accept both | |
363 | expected_auth_from = allowed_auth_sources or [HTTP_TYPE, VCS_TYPE] |
|
373 | expected_auth_from = allowed_auth_sources or [HTTP_TYPE, VCS_TYPE] | |
364 | if self.auth_type not in expected_auth_from: |
|
374 | if self.auth_type not in expected_auth_from: | |
365 | log.debug('Current auth source is %s but plugin only allows %s', |
|
375 | log.debug('Current auth source is %s but plugin only allows %s', | |
366 | self.auth_type, expected_auth_from) |
|
376 | self.auth_type, expected_auth_from) | |
367 | return False |
|
377 | return False | |
368 |
|
378 | |||
369 | return True |
|
379 | return True | |
370 |
|
380 | |||
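
A hedged sketch of the two checks above; the user, the 'ldap' plugin name and the auth type are illustrative:

    plugin.set_auth_type(HTTP_TYPE)
    user = plugin.get_user('alice')          # say the user's extern_type is 'ldap'

    plugin.allows_authentication_from(user)
    # False for a non-ldap plugin: the user is bound to another extern_type

    plugin.allows_authentication_from(
        user, allowed_auth_plugins=['ldap', plugin.name])
    # True: the extern_type is whitelisted and HTTP_TYPE is in the default sources
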
371 | def get_user(self, username=None, **kwargs): |
|
381 | def get_user(self, username=None, **kwargs): | |
372 | """ |
|
382 | """ | |
373 | Helper method for user fetching in plugins. By default it uses a |
|
383 | Helper method for user fetching in plugins. By default it uses a | |
374 | simple fetch by username, but this method can be customized in plugins |
|
384 | simple fetch by username, but this method can be customized in plugins | |
375 | e.g. the headers auth plugin to fetch the user by environ params |
|
385 | e.g. the headers auth plugin to fetch the user by environ params | |
376 |
|
386 | |||
377 | :param username: username if given to fetch from database |
|
387 | :param username: username if given to fetch from database | |
378 | :param kwargs: extra arguments needed for user fetching. |
|
388 | :param kwargs: extra arguments needed for user fetching. | |
379 | """ |
|
389 | """ | |
380 | user = None |
|
390 | user = None | |
381 | log.debug( |
|
391 | log.debug( | |
382 | 'Trying to fetch user `%s` from RhodeCode database', username) |
|
392 | 'Trying to fetch user `%s` from RhodeCode database', username) | |
383 | if username: |
|
393 | if username: | |
384 | user = User.get_by_username(username) |
|
394 | user = User.get_by_username(username) | |
385 | if not user: |
|
395 | if not user: | |
386 | log.debug('User not found, fallback to fetch user in ' |
|
396 | log.debug('User not found, fallback to fetch user in ' | |
387 | 'case insensitive mode') |
|
397 | 'case insensitive mode') | |
388 | user = User.get_by_username(username, case_insensitive=True) |
|
398 | user = User.get_by_username(username, case_insensitive=True) | |
389 | else: |
|
399 | else: | |
390 | log.debug('provided username:`%s` is empty skipping...', username) |
|
400 | log.debug('provided username:`%s` is empty skipping...', username) | |
391 | if not user: |
|
401 | if not user: | |
392 | log.debug('User `%s` not found in database', username) |
|
402 | log.debug('User `%s` not found in database', username) | |
393 | else: |
|
403 | else: | |
394 | log.debug('Got DB user:%s', user) |
|
404 | log.debug('Got DB user:%s', user) | |
395 | return user |
|
405 | return user | |
396 |
|
406 | |||
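
Usage sketch for the lookup above; the usernames are made up:

    plugin.get_user('Alice')    # exact match first, then case-insensitive fallback
    plugin.get_user('')         # empty username is skipped and returns None
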
397 | def user_activation_state(self): |
|
407 | def user_activation_state(self): | |
398 | """ |
|
408 | """ | |
399 | Defines user activation state when creating new users |
|
409 | Defines user activation state when creating new users | |
400 |
|
410 | |||
401 | :returns: boolean |
|
411 | :returns: boolean | |
402 | """ |
|
412 | """ | |
403 | raise NotImplementedError("Not implemented in base class") |
|
413 | raise NotImplementedError("Not implemented in base class") | |
404 |
|
414 | |||
405 | def auth(self, userobj, username, passwd, settings, **kwargs): |
|
415 | def auth(self, userobj, username, passwd, settings, **kwargs): | |
406 | """ |
|
416 | """ | |
407 | Given a user object (which may be null), username, a plaintext |
|
417 | Given a user object (which may be null), username, a plaintext | |
408 | password, and a settings object (containing all the keys needed as |
|
418 | password, and a settings object (containing all the keys needed as | |
409 | listed in settings()), authenticate this user's login attempt. |
|
419 | listed in settings()), authenticate this user's login attempt. | |
410 |
|
420 | |||
411 | Return None on failure. On success, return a dictionary of the form: |
|
421 | Return None on failure. On success, return a dictionary of the form: | |
412 |
|
422 | |||
413 | see: RhodeCodeAuthPluginBase.auth_func_attrs |
|
423 | see: RhodeCodeAuthPluginBase.auth_func_attrs | |
414 | This is later validated for correctness |
|
424 | This is later validated for correctness | |
415 | """ |
|
425 | """ | |
416 | raise NotImplementedError("not implemented in base class") |
|
426 | raise NotImplementedError("not implemented in base class") | |
417 |
|
427 | |||
418 | def _authenticate(self, userobj, username, passwd, settings, **kwargs): |
|
428 | def _authenticate(self, userobj, username, passwd, settings, **kwargs): | |
419 | """ |
|
429 | """ | |
420 | Wrapper around self.auth() that validates its return value |
|
430 | Wrapper around self.auth() that validates its return value | |
421 |
|
431 | |||
422 | :param userobj: userobj |
|
432 | :param userobj: userobj | |
423 | :param username: username |
|
433 | :param username: username | |
424 | :param passwd: plaintext password |
|
434 | :param passwd: plaintext password | |
425 | :param settings: plugin settings |
|
435 | :param settings: plugin settings | |
426 | """ |
|
436 | """ | |
427 | auth = self.auth(userobj, username, passwd, settings, **kwargs) |
|
437 | auth = self.auth(userobj, username, passwd, settings, **kwargs) | |
428 | if auth: |
|
438 | if auth: | |
429 | auth['_plugin'] = self.name |
|
439 | auth['_plugin'] = self.name | |
430 | auth['_ttl_cache'] = self.get_ttl_cache(settings) |
|
440 | auth['_ttl_cache'] = self.get_ttl_cache(settings) | |
431 | # check if hash should be migrated ? |
|
441 | # check if hash should be migrated ? | |
432 | new_hash = auth.get('_hash_migrate') |
|
442 | new_hash = auth.get('_hash_migrate') | |
433 | if new_hash: |
|
443 | if new_hash: | |
434 | self._migrate_hash_to_bcrypt(username, passwd, new_hash) |
|
444 | self._migrate_hash_to_bcrypt(username, passwd, new_hash) | |
435 | if 'user_group_sync' not in auth: |
|
445 | if 'user_group_sync' not in auth: | |
436 | auth['user_group_sync'] = False |
|
446 | auth['user_group_sync'] = False | |
437 | return self._validate_auth_return(auth) |
|
447 | return self._validate_auth_return(auth) | |
438 | return auth |
|
448 | return auth | |
439 |
|
449 | |||
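
For orientation, a hedged sketch of the dictionary a successful auth() could return; the authoritative key list is `auth_func_attrs`, and _authenticate() then adds `_plugin`, `_ttl_cache` and a default `user_group_sync` before validating it. All values below are illustrative:

    auth_result = {
        'username': 'alice',
        'firstname': 'Alice',
        'lastname': 'Example',
        'email': 'alice@example.com',
        'admin': False,
        'active': True,
        'active_from_extern': None,
        'extern_name': 'alice',
        'groups': [],
        'user_group_sync': False,
    }
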
440 | def _migrate_hash_to_bcrypt(self, username, password, new_hash): |
|
450 | def _migrate_hash_to_bcrypt(self, username, password, new_hash): | |
441 | new_hash_cypher = _RhodeCodeCryptoBCrypt() |
|
451 | new_hash_cypher = _RhodeCodeCryptoBCrypt() | |
442 | # extra checks, so make sure new hash is correct. |
|
452 | # extra checks, so make sure new hash is correct. | |
443 | password_encoded = safe_str(password) |
|
453 | password_encoded = safe_str(password) | |
444 | if new_hash and new_hash_cypher.hash_check( |
|
454 | if new_hash and new_hash_cypher.hash_check( | |
445 | password_encoded, new_hash): |
|
455 | password_encoded, new_hash): | |
446 | cur_user = User.get_by_username(username) |
|
456 | cur_user = User.get_by_username(username) | |
447 | cur_user.password = new_hash |
|
457 | cur_user.password = new_hash | |
448 | Session().add(cur_user) |
|
458 | Session().add(cur_user) | |
449 | Session().flush() |
|
459 | Session().flush() | |
450 | log.info('Migrated user %s hash to bcrypt', cur_user) |
|
460 | log.info('Migrated user %s hash to bcrypt', cur_user) | |
451 |
|
461 | |||
452 | def _validate_auth_return(self, ret): |
|
462 | def _validate_auth_return(self, ret): | |
453 | if not isinstance(ret, dict): |
|
463 | if not isinstance(ret, dict): | |
454 | raise Exception('returned value from auth must be a dict') |
|
464 | raise Exception('returned value from auth must be a dict') | |
455 | for k in self.auth_func_attrs: |
|
465 | for k in self.auth_func_attrs: | |
456 | if k not in ret: |
|
466 | if k not in ret: | |
457 | raise Exception('Missing %s attribute from returned data' % k) |
|
467 | raise Exception('Missing %s attribute from returned data' % k) | |
458 | return ret |
|
468 | return ret | |
459 |
|
469 | |||
460 | def get_ttl_cache(self, settings=None): |
|
470 | def get_ttl_cache(self, settings=None): | |
461 | plugin_settings = settings or self.get_settings() |
|
471 | plugin_settings = settings or self.get_settings() | |
462 | # we set default to 30, we make a compromise here, |
|
472 | # we set default to 30, we make a compromise here, | |
463 | # performance > security, mostly due to LDAP/SVN, majority |
|
473 | # performance > security, mostly due to LDAP/SVN, majority | |
464 | # of users pick cache_ttl to be enabled |
|
474 | # of users pick cache_ttl to be enabled | |
465 | from rhodecode.authentication import plugin_default_auth_ttl |
|
475 | from rhodecode.authentication import plugin_default_auth_ttl | |
466 | cache_ttl = plugin_default_auth_ttl |
|
476 | cache_ttl = plugin_default_auth_ttl | |
467 |
|
477 | |||
468 | if isinstance(self.AUTH_CACHE_TTL, (int, long)): |
|
478 | if isinstance(self.AUTH_CACHE_TTL, (int, long)): | |
469 | # plugin cache set inside is more important than the settings value |
|
479 | # plugin cache set inside is more important than the settings value | |
470 | cache_ttl = self.AUTH_CACHE_TTL |
|
480 | cache_ttl = self.AUTH_CACHE_TTL | |
471 | elif plugin_settings.get('cache_ttl'): |
|
481 | elif plugin_settings.get('cache_ttl'): | |
472 | cache_ttl = safe_int(plugin_settings.get('cache_ttl'), 0) |
|
482 | cache_ttl = safe_int(plugin_settings.get('cache_ttl'), 0) | |
473 |
|
483 | |||
474 | plugin_cache_active = bool(cache_ttl and cache_ttl > 0) |
|
484 | plugin_cache_active = bool(cache_ttl and cache_ttl > 0) | |
475 | return plugin_cache_active, cache_ttl |
|
485 | return plugin_cache_active, cache_ttl | |
476 |
|
486 | |||
477 |
|
487 | |||
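
The TTL resolution above, restated as a sketch: the module default applies first, a numeric AUTH_CACHE_TTL class attribute overrides it, otherwise a `cache_ttl` plugin setting does:

    plugin_cache_active, cache_ttl = plugin.get_ttl_cache()
    if plugin_cache_active:
        # authentication results for this plugin are cached for cache_ttl seconds
        log.debug('auth cache enabled, ttl=%s', cache_ttl)
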
478 | class RhodeCodeExternalAuthPlugin(RhodeCodeAuthPluginBase): |
|
488 | class RhodeCodeExternalAuthPlugin(RhodeCodeAuthPluginBase): | |
479 |
|
489 | |||
480 | @hybrid_property |
|
490 | @hybrid_property | |
481 | def allows_creating_users(self): |
|
491 | def allows_creating_users(self): | |
482 | return True |
|
492 | return True | |
483 |
|
493 | |||
484 | def use_fake_password(self): |
|
494 | def use_fake_password(self): | |
485 | """ |
|
495 | """ | |
486 | Return a boolean that indicates whether or not we should set the user's |
|
496 | Return a boolean that indicates whether or not we should set the user's | |
487 | password to a random value when it is authenticated by this plugin. |
|
497 | password to a random value when it is authenticated by this plugin. | |
488 | If your plugin provides authentication, then you will generally |
|
498 | If your plugin provides authentication, then you will generally | |
489 | want this. |
|
499 | want this. | |
490 |
|
500 | |||
491 | :returns: boolean |
|
501 | :returns: boolean | |
492 | """ |
|
502 | """ | |
493 | raise NotImplementedError("Not implemented in base class") |
|
503 | raise NotImplementedError("Not implemented in base class") | |
494 |
|
504 | |||
495 | def _authenticate(self, userobj, username, passwd, settings, **kwargs): |
|
505 | def _authenticate(self, userobj, username, passwd, settings, **kwargs): | |
496 | # at this point _authenticate calls plugin's `auth()` function |
|
506 | # at this point _authenticate calls plugin's `auth()` function | |
497 | auth = super(RhodeCodeExternalAuthPlugin, self)._authenticate( |
|
507 | auth = super(RhodeCodeExternalAuthPlugin, self)._authenticate( | |
498 | userobj, username, passwd, settings, **kwargs) |
|
508 | userobj, username, passwd, settings, **kwargs) | |
499 |
|
509 | |||
500 | if auth: |
|
510 | if auth: | |
501 | # maybe plugin will clean the username ? |
|
511 | # maybe plugin will clean the username ? | |
502 | # we should use the return value |
|
512 | # we should use the return value | |
503 | username = auth['username'] |
|
513 | username = auth['username'] | |
504 |
|
514 | |||
505 | # if external source tells us that user is not active, we should |
|
515 | # if external source tells us that user is not active, we should | |
506 | # skip the rest of the process. This can prevent creating users in |
|
516 | # skip the rest of the process. This can prevent creating users in | |
507 | # RhodeCode when using external authentication, but if it's |
|
517 | # RhodeCode when using external authentication, but if it's | |
508 | # an inactive user we shouldn't create that user anyway |
|
518 | # an inactive user we shouldn't create that user anyway | |
509 | if auth['active_from_extern'] is False: |
|
519 | if auth['active_from_extern'] is False: | |
510 | log.warning( |
|
520 | log.warning( | |
511 | "User %s authenticated against %s, but is inactive", |
|
521 | "User %s authenticated against %s, but is inactive", | |
512 | username, self.__module__) |
|
522 | username, self.__module__) | |
513 | return None |
|
523 | return None | |
514 |
|
524 | |||
515 | cur_user = User.get_by_username(username, case_insensitive=True) |
|
525 | cur_user = User.get_by_username(username, case_insensitive=True) | |
516 | is_user_existing = cur_user is not None |
|
526 | is_user_existing = cur_user is not None | |
517 |
|
527 | |||
518 | if is_user_existing: |
|
528 | if is_user_existing: | |
519 | log.debug('Syncing user `%s` from ' |
|
529 | log.debug('Syncing user `%s` from ' | |
520 | '`%s` plugin', username, self.name) |
|
530 | '`%s` plugin', username, self.name) | |
521 | else: |
|
531 | else: | |
522 | log.debug('Creating non existing user `%s` from ' |
|
532 | log.debug('Creating non existing user `%s` from ' | |
523 | '`%s` plugin', username, self.name) |
|
533 | '`%s` plugin', username, self.name) | |
524 |
|
534 | |||
525 | if self.allows_creating_users: |
|
535 | if self.allows_creating_users: | |
526 | log.debug('Plugin `%s` allows to ' |
|
536 | log.debug('Plugin `%s` allows to ' | |
527 | 'create new users', self.name) |
|
537 | 'create new users', self.name) | |
528 | else: |
|
538 | else: | |
529 | log.debug('Plugin `%s` does not allow to ' |
|
539 | log.debug('Plugin `%s` does not allow to ' | |
530 | 'create new users', self.name) |
|
540 | 'create new users', self.name) | |
531 |
|
541 | |||
532 | user_parameters = { |
|
542 | user_parameters = { | |
533 | 'username': username, |
|
543 | 'username': username, | |
534 | 'email': auth["email"], |
|
544 | 'email': auth["email"], | |
535 | 'firstname': auth["firstname"], |
|
545 | 'firstname': auth["firstname"], | |
536 | 'lastname': auth["lastname"], |
|
546 | 'lastname': auth["lastname"], | |
537 | 'active': auth["active"], |
|
547 | 'active': auth["active"], | |
538 | 'admin': auth["admin"], |
|
548 | 'admin': auth["admin"], | |
539 | 'extern_name': auth["extern_name"], |
|
549 | 'extern_name': auth["extern_name"], | |
540 | 'extern_type': self.name, |
|
550 | 'extern_type': self.name, | |
541 | 'plugin': self, |
|
551 | 'plugin': self, | |
542 | 'allow_to_create_user': self.allows_creating_users, |
|
552 | 'allow_to_create_user': self.allows_creating_users, | |
543 | } |
|
553 | } | |
544 |
|
554 | |||
545 | if not is_user_existing: |
|
555 | if not is_user_existing: | |
546 | if self.use_fake_password(): |
|
556 | if self.use_fake_password(): | |
547 | # Randomize the PW because we don't need it, but don't want |
|
557 | # Randomize the PW because we don't need it, but don't want | |
548 | # them blank either |
|
558 | # them blank either | |
549 | passwd = PasswordGenerator().gen_password(length=16) |
|
559 | passwd = PasswordGenerator().gen_password(length=16) | |
550 | user_parameters['password'] = passwd |
|
560 | user_parameters['password'] = passwd | |
551 | else: |
|
561 | else: | |
552 | # Since the password is required by create_or_update method of |
|
562 | # Since the password is required by create_or_update method of | |
553 | # UserModel, we need to set it explicitly. |
|
563 | # UserModel, we need to set it explicitly. | |
554 | # The create_or_update method is smart and recognises the |
|
564 | # The create_or_update method is smart and recognises the | |
555 | # password hashes as well. |
|
565 | # password hashes as well. | |
556 | user_parameters['password'] = cur_user.password |
|
566 | user_parameters['password'] = cur_user.password | |
557 |
|
567 | |||
558 | # we either create or update users, we also pass the flag |
|
568 | # we either create or update users, we also pass the flag | |
559 | # that controls if this method can actually do that. |
|
569 | # that controls if this method can actually do that. | |
560 | # raises NotAllowedToCreateUserError if it cannot, and we try to. |
|
570 | # raises NotAllowedToCreateUserError if it cannot, and we try to. | |
561 | user = UserModel().create_or_update(**user_parameters) |
|
571 | user = UserModel().create_or_update(**user_parameters) | |
562 | Session().flush() |
|
572 | Session().flush() | |
563 | # enforce user is just in given groups, all of them have to be ones |
|
573 | # enforce user is just in given groups, all of them have to be ones | |
564 | # created from plugins. We store this info in _group_data JSON |
|
574 | # created from plugins. We store this info in _group_data JSON | |
565 | # field |
|
575 | # field | |
566 |
|
576 | |||
567 | if auth['user_group_sync']: |
|
577 | if auth['user_group_sync']: | |
568 | try: |
|
578 | try: | |
569 | groups = auth['groups'] or [] |
|
579 | groups = auth['groups'] or [] | |
570 | log.debug( |
|
580 | log.debug( | |
571 | 'Performing user_group sync based on set `%s` ' |
|
581 | 'Performing user_group sync based on set `%s` ' | |
572 | 'returned by `%s` plugin', groups, self.name) |
|
582 | 'returned by `%s` plugin', groups, self.name) | |
573 | UserGroupModel().enforce_groups(user, groups, self.name) |
|
583 | UserGroupModel().enforce_groups(user, groups, self.name) | |
574 | except Exception: |
|
584 | except Exception: | |
575 | # if group syncing fails for any reason, we should |
|
585 | # if group syncing fails for any reason, we should | |
576 | # proceed with login |
|
586 | # proceed with login | |
577 | log.error(traceback.format_exc()) |
|
587 | log.error(traceback.format_exc()) | |
578 |
|
588 | |||
579 | Session().commit() |
|
589 | Session().commit() | |
580 | return auth |
|
590 | return auth | |
581 |
|
591 | |||
582 |
|
592 | |||
583 | class AuthLdapBase(object): |
|
593 | class AuthLdapBase(object): | |
584 |
|
594 | |||
585 | @classmethod |
|
595 | @classmethod | |
586 | def _build_servers(cls, ldap_server_type, ldap_server, port, use_resolver=True): |
|
596 | def _build_servers(cls, ldap_server_type, ldap_server, port, use_resolver=True): | |
587 |
|
597 | |||
588 | def host_resolver(host, port, full_resolve=True): |
|
598 | def host_resolver(host, port, full_resolve=True): | |
589 | """ |
|
599 | """ | |
590 | Main work for this function is to prevent ldap connection issues, |
|
600 | Main work for this function is to prevent ldap connection issues, | |
591 | and detect them early using "greenified" sockets |
|
601 | and detect them early using "greenified" sockets | |
592 | """ |
|
602 | """ | |
593 | host = host.strip() |
|
603 | host = host.strip() | |
594 | if not full_resolve: |
|
604 | if not full_resolve: | |
595 | return '{}:{}'.format(host, port) |
|
605 | return '{}:{}'.format(host, port) | |
596 |
|
606 | |||
597 | log.debug('LDAP: Resolving IP for LDAP host %s', host) |
|
607 | log.debug('LDAP: Resolving IP for LDAP host %s', host) | |
598 | try: |
|
608 | try: | |
599 | ip = socket.gethostbyname(host) |
|
609 | ip = socket.gethostbyname(host) | |
600 | log.debug('Got LDAP server %s ip %s', host, ip) |
|
610 | log.debug('Got LDAP server %s ip %s', host, ip) | |
601 | except Exception: |
|
611 | except Exception: | |
602 | raise LdapConnectionError( |
|
612 | raise LdapConnectionError( | |
603 | 'Failed to resolve host: `{}`'.format(host)) |
|
613 | 'Failed to resolve host: `{}`'.format(host)) | |
604 |
|
614 | |||
605 | log.debug('LDAP: Checking if IP %s is accessible', ip) |
|
615 | log.debug('LDAP: Checking if IP %s is accessible', ip) | |
606 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) |
|
616 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) | |
607 | try: |
|
617 | try: | |
608 | s.connect((ip, int(port))) |
|
618 | s.connect((ip, int(port))) | |
609 | s.shutdown(socket.SHUT_RD) |
|
619 | s.shutdown(socket.SHUT_RD) | |
610 | except Exception: |
|
620 | except Exception: | |
611 | raise LdapConnectionError( |
|
621 | raise LdapConnectionError( | |
612 | 'Failed to connect to host: `{}:{}`'.format(host, port)) |
|
622 | 'Failed to connect to host: `{}:{}`'.format(host, port)) | |
613 |
|
623 | |||
614 | return '{}:{}'.format(host, port) |
|
624 | return '{}:{}'.format(host, port) | |
615 |
|
625 | |||
616 | if len(ldap_server) == 1: |
|
626 | if len(ldap_server) == 1: | |
617 | # in case of single server use resolver to detect potential |
|
627 | # in case of single server use resolver to detect potential | |
618 | # connection issues |
|
628 | # connection issues | |
619 | full_resolve = True |
|
629 | full_resolve = True | |
620 | else: |
|
630 | else: | |
621 | full_resolve = False |
|
631 | full_resolve = False | |
622 |
|
632 | |||
623 | return ', '.join( |
|
633 | return ', '.join( | |
624 | ["{}://{}".format( |
|
634 | ["{}://{}".format( | |
625 | ldap_server_type, |
|
635 | ldap_server_type, | |
626 | host_resolver(host, port, full_resolve=use_resolver and full_resolve)) |
|
636 | host_resolver(host, port, full_resolve=use_resolver and full_resolve)) | |
627 | for host in ldap_server]) |
|
637 | for host in ldap_server]) | |
628 |
|
638 | |||
629 | @classmethod |
|
639 | @classmethod | |
630 | def _get_server_list(cls, servers): |
|
640 | def _get_server_list(cls, servers): | |
631 | return map(string.strip, servers.split(',')) |
|
641 | return map(string.strip, servers.split(',')) | |
632 |
|
642 | |||
633 | @classmethod |
|
643 | @classmethod | |
634 | def get_uid(cls, username, server_addresses): |
|
644 | def get_uid(cls, username, server_addresses): | |
635 | uid = username |
|
645 | uid = username | |
636 | for server_addr in server_addresses: |
|
646 | for server_addr in server_addresses: | |
637 | uid = chop_at(username, "@%s" % server_addr) |
|
647 | uid = chop_at(username, "@%s" % server_addr) | |
638 | return uid |
|
648 | return uid | |
639 |
|
649 | |||
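
Usage sketch for the UID extraction above; the addresses are made up:

    AuthLdapBase.get_uid('alice@example.com', ['example.com'])   # -> 'alice'
    AuthLdapBase.get_uid('alice', ['example.com'])               # -> 'alice' (unchanged)
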
640 | @classmethod |
|
650 | @classmethod | |
641 | def validate_username(cls, username): |
|
651 | def validate_username(cls, username): | |
642 | if "," in username: |
|
652 | if "," in username: | |
643 | raise LdapUsernameError( |
|
653 | raise LdapUsernameError( | |
644 | "invalid character `,` in username: `{}`".format(username)) |
|
654 | "invalid character `,` in username: `{}`".format(username)) | |
645 |
|
655 | |||
646 | @classmethod |
|
656 | @classmethod | |
647 | def validate_password(cls, username, password): |
|
657 | def validate_password(cls, username, password): | |
648 | if not password: |
|
658 | if not password: | |
649 | msg = "Authenticating user %s with blank password not allowed" |
|
659 | msg = "Authenticating user %s with blank password not allowed" | |
650 | log.warning(msg, username) |
|
660 | log.warning(msg, username) | |
651 | raise LdapPasswordError(msg) |
|
661 | raise LdapPasswordError(msg) | |
652 |
|
662 | |||
653 |
|
663 | |||
654 | def loadplugin(plugin_id): |
|
664 | def loadplugin(plugin_id): | |
655 | """ |
|
665 | """ | |
656 | Loads and returns an instantiated authentication plugin. |
|
666 | Loads and returns an instantiated authentication plugin. | |
657 | Returns the RhodeCodeAuthPluginBase subclass on success, |
|
667 | Returns the RhodeCodeAuthPluginBase subclass on success, | |
658 | or None on failure. |
|
668 | or None on failure. | |
659 | """ |
|
669 | """ | |
660 | # TODO: Disusing pyramids thread locals to retrieve the registry. |
|
670 | # TODO: Disusing pyramids thread locals to retrieve the registry. | |
661 | authn_registry = get_authn_registry() |
|
671 | authn_registry = get_authn_registry() | |
662 | plugin = authn_registry.get_plugin(plugin_id) |
|
672 | plugin = authn_registry.get_plugin(plugin_id) | |
663 | if plugin is None: |
|
673 | if plugin is None: | |
664 | log.error('Authentication plugin not found: "%s"', plugin_id) |
|
674 | log.error('Authentication plugin not found: "%s"', plugin_id) | |
665 | return plugin |
|
675 | return plugin | |
666 |
|
676 | |||
667 |
|
677 | |||
668 | def get_authn_registry(registry=None): |
|
678 | def get_authn_registry(registry=None): | |
669 | registry = registry or get_current_registry() |
|
679 | registry = registry or get_current_registry() | |
670 |
authn_registry = registry. |
|
680 | authn_registry = registry.queryUtility(IAuthnPluginRegistry) | |
671 | return authn_registry |
|
681 | return authn_registry | |
672 |
|
682 | |||
673 |
|
683 | |||
674 | def authenticate(username, password, environ=None, auth_type=None, |
|
684 | def authenticate(username, password, environ=None, auth_type=None, | |
675 | skip_missing=False, registry=None, acl_repo_name=None): |
|
685 | skip_missing=False, registry=None, acl_repo_name=None): | |
676 | """ |
|
686 | """ | |
677 | Authentication function used for access control, |
|
687 | Authentication function used for access control, | |
678 | It tries to authenticate based on enabled authentication modules. |
|
688 | It tries to authenticate based on enabled authentication modules. | |
679 |
|
689 | |||
680 | :param username: username can be empty for headers auth |
|
690 | :param username: username can be empty for headers auth | |
681 | :param password: password can be empty for headers auth |
|
691 | :param password: password can be empty for headers auth | |
682 | :param environ: environ headers passed for headers auth |
|
692 | :param environ: environ headers passed for headers auth | |
683 | :param auth_type: type of authentication, either `HTTP_TYPE` or `VCS_TYPE` |
|
693 | :param auth_type: type of authentication, either `HTTP_TYPE` or `VCS_TYPE` | |
684 | :param skip_missing: ignores plugins that are in db but not in environment |
|
694 | :param skip_missing: ignores plugins that are in db but not in environment | |
685 | :returns: None if auth failed, plugin_user dict if auth is correct |
|
695 | :returns: None if auth failed, plugin_user dict if auth is correct | |
686 | """ |
|
696 | """ | |
687 | if not auth_type or auth_type not in [HTTP_TYPE, VCS_TYPE]: |
|
697 | if not auth_type or auth_type not in [HTTP_TYPE, VCS_TYPE]: | |
688 | raise ValueError('auth type must be one of http, vcs, got "%s" instead' |
|
698 | raise ValueError('auth type must be one of http, vcs, got "%s" instead' | |
689 | % auth_type) |
|
699 | % auth_type) | |
690 | headers_only = environ and not (username and password) |
|
700 | headers_only = environ and not (username and password) | |
691 |
|
701 | |||
692 | authn_registry = get_authn_registry(registry) |
|
702 | authn_registry = get_authn_registry(registry) | |
|
703 | ||||
693 | plugins_to_check = authn_registry.get_plugins_for_authentication() |
|
704 | plugins_to_check = authn_registry.get_plugins_for_authentication() | |
694 | log.debug('Starting ordered authentication chain using %s plugins', |
|
705 | log.debug('Starting ordered authentication chain using %s plugins', | |
695 | [x.name for x in plugins_to_check]) |
|
706 | [x.name for x in plugins_to_check]) | |
696 | for plugin in plugins_to_check: |
|
707 | for plugin in plugins_to_check: | |
697 | plugin.set_auth_type(auth_type) |
|
708 | plugin.set_auth_type(auth_type) | |
698 | plugin.set_calling_scope_repo(acl_repo_name) |
|
709 | plugin.set_calling_scope_repo(acl_repo_name) | |
699 |
|
710 | |||
700 | if headers_only and not plugin.is_headers_auth: |
|
711 | if headers_only and not plugin.is_headers_auth: | |
701 | log.debug('Auth type is for headers only and plugin `%s` is not ' |
|
712 | log.debug('Auth type is for headers only and plugin `%s` is not ' | |
702 | 'headers plugin, skipping...', plugin.get_id()) |
|
713 | 'headers plugin, skipping...', plugin.get_id()) | |
703 | continue |
|
714 | continue | |
704 |
|
715 | |||
705 | log.debug('Trying authentication using ** %s **', plugin.get_id()) |
|
716 | log.debug('Trying authentication using ** %s **', plugin.get_id()) | |
706 |
|
717 | |||
707 | # load plugin settings from RhodeCode database |
|
718 | # load plugin settings from RhodeCode database | |
708 | plugin_settings = plugin.get_settings() |
|
719 | plugin_settings = plugin.get_settings() | |
709 | plugin_sanitized_settings = plugin.log_safe_settings(plugin_settings) |
|
720 | plugin_sanitized_settings = plugin.log_safe_settings(plugin_settings) | |
710 | log.debug('Plugin `%s` settings:%s', plugin.get_id(), plugin_sanitized_settings) |
|
721 | log.debug('Plugin `%s` settings:%s', plugin.get_id(), plugin_sanitized_settings) | |
711 |
|
722 | |||
712 | # use plugin's method of user extraction. |
|
723 | # use plugin's method of user extraction. | |
713 | user = plugin.get_user(username, environ=environ, |
|
724 | user = plugin.get_user(username, environ=environ, | |
714 | settings=plugin_settings) |
|
725 | settings=plugin_settings) | |
715 | display_user = user.username if user else username |
|
726 | display_user = user.username if user else username | |
716 | log.debug( |
|
727 | log.debug( | |
717 | 'Plugin %s extracted user is `%s`', plugin.get_id(), display_user) |
|
728 | 'Plugin %s extracted user is `%s`', plugin.get_id(), display_user) | |
718 |
|
729 | |||
719 | if not plugin.allows_authentication_from(user): |
|
730 | if not plugin.allows_authentication_from(user): | |
720 | log.debug('Plugin %s does not accept user `%s` for authentication', |
|
731 | log.debug('Plugin %s does not accept user `%s` for authentication', | |
721 | plugin.get_id(), display_user) |
|
732 | plugin.get_id(), display_user) | |
722 | continue |
|
733 | continue | |
723 | else: |
|
734 | else: | |
724 | log.debug('Plugin %s accepted user `%s` for authentication', |
|
735 | log.debug('Plugin %s accepted user `%s` for authentication', | |
725 | plugin.get_id(), display_user) |
|
736 | plugin.get_id(), display_user) | |
726 |
|
737 | |||
727 | log.info('Authenticating user `%s` using %s plugin', |
|
738 | log.info('Authenticating user `%s` using %s plugin', | |
728 | display_user, plugin.get_id()) |
|
739 | display_user, plugin.get_id()) | |
729 |
|
740 | |||
730 | plugin_cache_active, cache_ttl = plugin.get_ttl_cache(plugin_settings) |
|
741 | plugin_cache_active, cache_ttl = plugin.get_ttl_cache(plugin_settings) | |
731 |
|
742 | |||
732 | log.debug('AUTH_CACHE_TTL for plugin `%s` active: %s (TTL: %s)', |
|
743 | log.debug('AUTH_CACHE_TTL for plugin `%s` active: %s (TTL: %s)', | |
733 | plugin.get_id(), plugin_cache_active, cache_ttl) |
|
744 | plugin.get_id(), plugin_cache_active, cache_ttl) | |
734 |
|
745 | |||
735 | user_id = user.user_id if user else None |
|
746 | user_id = user.user_id if user else None | |
736 | # don't cache for empty users |
|
747 | # don't cache for empty users | |
737 | plugin_cache_active = plugin_cache_active and user_id |
|
748 | plugin_cache_active = plugin_cache_active and user_id | |
738 | cache_namespace_uid = 'cache_user_auth.{}'.format(user_id) |
|
749 | cache_namespace_uid = 'cache_user_auth.{}'.format(user_id) | |
739 | region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid) |
|
750 | region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid) | |
740 |
|
751 | |||
741 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, |
|
752 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, | |
742 | expiration_time=cache_ttl, |
|
753 | expiration_time=cache_ttl, | |
743 | condition=plugin_cache_active) |
|
754 | condition=plugin_cache_active) | |
744 | def compute_auth( |
|
755 | def compute_auth( | |
745 | cache_name, plugin_name, username, password): |
|
756 | cache_name, plugin_name, username, password): | |
746 |
|
757 | |||
747 | # _authenticate is a wrapper for .auth() method of plugin. |
|
758 | # _authenticate is a wrapper for .auth() method of plugin. | |
748 | # it checks if .auth() sends proper data. |
|
759 | # it checks if .auth() sends proper data. | |
749 | # For RhodeCodeExternalAuthPlugin it also maps users to |
|
760 | # For RhodeCodeExternalAuthPlugin it also maps users to | |
750 | # Database and maps the attributes returned from .auth() |
|
761 | # Database and maps the attributes returned from .auth() | |
751 | # to RhodeCode database. If this function returns data |
|
762 | # to RhodeCode database. If this function returns data | |
752 | # then auth is correct. |
|
763 | # then auth is correct. | |
753 | log.debug('Running plugin `%s` _authenticate method ' |
|
764 | log.debug('Running plugin `%s` _authenticate method ' | |
754 | 'using username and password', plugin.get_id()) |
|
765 | 'using username and password', plugin.get_id()) | |
755 | return plugin._authenticate( |
|
766 | return plugin._authenticate( | |
756 | user, username, password, plugin_settings, |
|
767 | user, username, password, plugin_settings, | |
757 | environ=environ or {}) |
|
768 | environ=environ or {}) | |
758 |
|
769 | |||
759 | start = time.time() |
|
770 | start = time.time() | |
760 | # for environ based auth, password can be empty, but then the validation is |
|
771 | # for environ based auth, password can be empty, but then the validation is | |
761 | # on the server that fills in the env data needed for authentication |
|
772 | # on the server that fills in the env data needed for authentication | |
762 | plugin_user = compute_auth('auth', plugin.name, username, (password or '')) |
|
773 | plugin_user = compute_auth('auth', plugin.name, username, (password or '')) | |
763 |
|
774 | |||
764 | auth_time = time.time() - start |
|
775 | auth_time = time.time() - start | |
765 | log.debug('Authentication for plugin `%s` completed in %.4fs, ' |
|
776 | log.debug('Authentication for plugin `%s` completed in %.4fs, ' | |
766 | 'expiration time of fetched cache %.1fs.', |
|
777 | 'expiration time of fetched cache %.1fs.', | |
767 | plugin.get_id(), auth_time, cache_ttl) |
|
778 | plugin.get_id(), auth_time, cache_ttl) | |
768 |
|
779 | |||
769 | log.debug('PLUGIN USER DATA: %s', plugin_user) |
|
780 | log.debug('PLUGIN USER DATA: %s', plugin_user) | |
770 |
|
781 | |||
771 | if plugin_user: |
|
782 | if plugin_user: | |
772 | log.debug('Plugin returned proper authentication data') |
|
783 | log.debug('Plugin returned proper authentication data') | |
773 | return plugin_user |
|
784 | return plugin_user | |
774 | # we failed to Auth because .auth() method didn't return proper user |
|
785 | # we failed to Auth because .auth() method didn't return proper user | |
775 | log.debug("User `%s` failed to authenticate against %s", |
|
786 | log.debug("User `%s` failed to authenticate against %s", | |
776 | display_user, plugin.get_id()) |
|
787 | display_user, plugin.get_id()) | |
777 |
|
788 | |||
778 | # case when we failed to authenticate against all defined plugins |
|
789 | # case when we failed to authenticate against all defined plugins | |
779 | return None |
|
790 | return None | |
780 |
|
791 | |||
781 |
|
792 | |||
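
A hedged sketch of a direct call to the chain above; `request` and the credentials are placeholders, and normally the registry comes from the active Pyramid request:

    plugin_user = authenticate(
        'alice', 's3cret', environ=None, auth_type=HTTP_TYPE,
        registry=request.registry, acl_repo_name=None)
    if plugin_user is None:
        # every enabled and active plugin rejected the credentials
        pass
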
782 | def chop_at(s, sub, inclusive=False): |
|
793 | def chop_at(s, sub, inclusive=False): | |
783 | """Truncate string ``s`` at the first occurrence of ``sub``. |
|
794 | """Truncate string ``s`` at the first occurrence of ``sub``. | |
784 |
|
795 | |||
785 | If ``inclusive`` is true, truncate just after ``sub`` rather than at it. |
|
796 | If ``inclusive`` is true, truncate just after ``sub`` rather than at it. | |
786 |
|
797 | |||
787 | >>> chop_at("plutocratic brats", "rat") |
|
798 | >>> chop_at("plutocratic brats", "rat") | |
788 | 'plutoc' |
|
799 | 'plutoc' | |
789 | >>> chop_at("plutocratic brats", "rat", True) |
|
800 | >>> chop_at("plutocratic brats", "rat", True) | |
790 | 'plutocrat' |
|
801 | 'plutocrat' | |
791 | """ |
|
802 | """ | |
792 | pos = s.find(sub) |
|
803 | pos = s.find(sub) | |
793 | if pos == -1: |
|
804 | if pos == -1: | |
794 | return s |
|
805 | return s | |
795 | if inclusive: |
|
806 | if inclusive: | |
796 | return s[:pos+len(sub)] |
|
807 | return s[:pos+len(sub)] | |
797 | return s[:pos] |
|
808 | return s[:pos] |
@@ -1,94 +1,107 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import logging |
|
21 | import logging | |
22 |
|
22 | |||
23 | from pyramid.exceptions import ConfigurationError |
|
23 | from pyramid.exceptions import ConfigurationError | |
24 | from zope.interface import implementer |
|
24 | from zope.interface import implementer | |
25 |
|
25 | |||
26 | from rhodecode.authentication.interface import IAuthnPluginRegistry |
|
26 | from rhodecode.authentication.interface import IAuthnPluginRegistry | |
27 | from rhodecode.lib.utils2 import safe_str |
|
27 | from rhodecode.lib.utils2 import safe_str | |
28 | from rhodecode.model.settings import SettingsModel |
|
28 | from rhodecode.model.settings import SettingsModel | |
29 |
|
29 | |||
30 | log = logging.getLogger(__name__) |
|
30 | log = logging.getLogger(__name__) | |
31 |
|
31 | |||
32 |
|
32 | |||
33 | @implementer(IAuthnPluginRegistry) |
|
33 | @implementer(IAuthnPluginRegistry) | |
34 | class AuthenticationPluginRegistry(object): |
|
34 | class AuthenticationPluginRegistry(object): | |
35 |
|
35 | |||
36 | # INI settings key to set a fallback authentication plugin. |
|
36 | # INI settings key to set a fallback authentication plugin. | |
37 | fallback_plugin_key = 'rhodecode.auth_plugin_fallback' |
|
37 | fallback_plugin_key = 'rhodecode.auth_plugin_fallback' | |
38 |
|
38 | |||
39 | def __init__(self, settings): |
|
39 | def __init__(self, settings): | |
40 | self._plugins = {} |
|
40 | self._plugins = {} | |
|
41 | self._plugins_for_auth = None | |||
41 | self._fallback_plugin = settings.get(self.fallback_plugin_key, None) |
|
42 | self._fallback_plugin = settings.get(self.fallback_plugin_key, None) | |
42 |
|
43 | |||
43 | def add_authn_plugin(self, config, plugin): |
|
44 | def add_authn_plugin(self, config, plugin): | |
44 | plugin_id = plugin.get_id() |
|
45 | plugin_id = plugin.get_id() | |
45 | if plugin_id in self._plugins.keys(): |
|
46 | if plugin_id in self._plugins.keys(): | |
46 | raise ConfigurationError( |
|
47 | raise ConfigurationError( | |
47 | 'Cannot register authentication plugin twice: "%s"', plugin_id) |
|
48 | 'Cannot register authentication plugin twice: "%s"', plugin_id) | |
48 | else: |
|
49 | else: | |
49 | log.debug('Register authentication plugin: "%s"', plugin_id) |
|
50 | log.debug('Register authentication plugin: "%s"', plugin_id) | |
50 | self._plugins[plugin_id] = plugin |
|
51 | self._plugins[plugin_id] = plugin | |
51 |
|
52 | |||
52 | def get_plugins(self): |
|
53 | def get_plugins(self): | |
53 | def sort_key(plugin): |
|
54 | def sort_key(plugin): | |
54 | return str.lower(safe_str(plugin.get_display_name())) |
|
55 | return str.lower(safe_str(plugin.get_display_name())) | |
55 |
|
56 | |||
56 | return sorted(self._plugins.values(), key=sort_key) |
|
57 | return sorted(self._plugins.values(), key=sort_key) | |
57 |
|
58 | |||
58 | def get_plugin(self, plugin_id): |
|
59 | def get_plugin(self, plugin_id): | |
59 | return self._plugins.get(plugin_id, None) |
|
60 | return self._plugins.get(plugin_id, None) | |
60 |
|
61 | |||
61 | def get_plugin_by_uid(self, plugin_uid): |
|
62 | def get_plugin_by_uid(self, plugin_uid): | |
62 | for plugin in self._plugins.values(): |
|
63 | for plugin in self._plugins.values(): | |
63 | if plugin.uid == plugin_uid: |
|
64 | if plugin.uid == plugin_uid: | |
64 | return plugin |
|
65 | return plugin | |
65 |
|
66 | |||
|
67 | def invalidate_plugins_for_auth(self): | |||
|
68 | log.debug('Invalidating cached plugins for authentication') | |||
|
69 | self._plugins_for_auth = None | |||
|
70 | ||||
66 | def get_plugins_for_authentication(self): |
|
71 | def get_plugins_for_authentication(self): | |
67 | """ |
|
72 | """ | |
68 | Returns a list of plugins which should be consulted when authenticating |
|
73 | Returns a list of plugins which should be consulted when authenticating | |
69 | a user. It only returns plugins which are enabled and active. |
|
74 | a user. It only returns plugins which are enabled and active. | |
70 | Additionally it includes the fallback plugin from the INI file, if |
|
75 | Additionally it includes the fallback plugin from the INI file, if | |
71 | `rhodecode.auth_plugin_fallback` is set to a plugin ID. |
|
76 | `rhodecode.auth_plugin_fallback` is set to a plugin ID. | |
72 | """ |
|
77 | """ | |
|
78 | if self._plugins_for_auth is not None: | |||
|
79 | return self._plugins_for_auth | |||
|
80 | ||||
73 | plugins = [] |
|
81 | plugins = [] | |
74 |
|
82 | |||
75 | # Add all enabled and active plugins to the list. We iterate over the |
|
83 | # Add all enabled and active plugins to the list. We iterate over the | |
76 | # auth_plugins setting from DB because it also represents the ordering. |
|
84 | # auth_plugins setting from DB because it also represents the ordering. | |
77 | enabled_plugins = SettingsModel().get_auth_plugins() |
|
85 | enabled_plugins = SettingsModel().get_auth_plugins() | |
78 | raw_settings = SettingsModel().get_all_settings() |
|
86 | raw_settings = SettingsModel().get_all_settings() | |
79 | for plugin_id in enabled_plugins: |
|
87 | for plugin_id in enabled_plugins: | |
80 | plugin = self.get_plugin(plugin_id) |
|
88 | plugin = self.get_plugin(plugin_id) | |
81 | if plugin is not None and plugin.is_active( |
|
89 | if plugin is not None and plugin.is_active( | |
82 | plugin_cached_settings=raw_settings): |
|
90 | plugin_cached_settings=raw_settings): | |
|
91 | ||||
|
92 | # inject settings into plugin, we can re-use the DB fetched settings here | |||
|
93 | plugin._settings = plugin._propagate_settings(raw_settings) | |||
83 | plugins.append(plugin) |
|
94 | plugins.append(plugin) | |
84 |
|
95 | |||
85 | # Add the fallback plugin from ini file. |
|
96 | # Add the fallback plugin from ini file. | |
86 | if self._fallback_plugin: |
|
97 | if self._fallback_plugin: | |
87 | log.warn( |
|
98 | log.warn( | |
88 | 'Using fallback authentication plugin from INI file: "%s"', |
|
99 | 'Using fallback authentication plugin from INI file: "%s"', | |
89 | self._fallback_plugin) |
|
100 | self._fallback_plugin) | |
90 | plugin = self.get_plugin(self._fallback_plugin) |
|
101 | plugin = self.get_plugin(self._fallback_plugin) | |
91 | if plugin is not None and plugin not in plugins: |
|
102 | if plugin is not None and plugin not in plugins: | |
|
103 | plugin._settings = plugin._propagate_settings(raw_settings) | |||
92 | plugins.append(plugin) |
|
104 | plugins.append(plugin) | |
93 |
|
105 | |||
94 | return plugins |
|
106 | self._plugins_for_auth = plugins | |
|
107 | return self._plugins_for_auth |
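
The registry change above memoizes the result of get_plugins_for_authentication() in _plugins_for_auth and clears it through invalidate_plugins_for_auth() when the auth configuration changes, so the plugin list and its settings are not rebuilt from the database on every authentication attempt. The following is a minimal, self-contained sketch of that memoize-and-invalidate pattern; CachedRegistry and load_active_plugins are hypothetical stand-ins for the real registry and its SettingsModel-backed lookup, not RhodeCode APIs.

    # Sketch of the memoize/invalidate pattern used by the plugin registry.
    class CachedRegistry(object):
        def __init__(self, loader):
            self._loader = loader            # expensive lookup (DB query in the real code)
            self._plugins_for_auth = None    # None means the cache is cold

        def get_plugins_for_authentication(self):
            if self._plugins_for_auth is not None:
                return self._plugins_for_auth          # cache hit: skip the lookup
            self._plugins_for_auth = list(self._loader())
            return self._plugins_for_auth

        def invalidate_plugins_for_auth(self):
            # Called after auth settings change; the next call recomputes the list.
            self._plugins_for_auth = None

    def load_active_plugins():
        # Hypothetical stand-in for SettingsModel().get_auth_plugins()
        return ['egg:rhodecode-enterprise-ce#rhodecode']

    registry = CachedRegistry(load_active_plugins)
    first = registry.get_plugins_for_authentication()
    assert registry.get_plugins_for_authentication() is first      # served from cache
    registry.invalidate_plugins_for_auth()                          # settings changed
    assert registry.get_plugins_for_authentication() is not first   # recomputed
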
@@ -1,179 +1,180 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import colander |
|
21 | import colander | |
22 | import formencode.htmlfill |
|
22 | import formencode.htmlfill | |
23 | import logging |
|
23 | import logging | |
24 |
|
24 | |||
25 | from pyramid.httpexceptions import HTTPFound |
|
25 | from pyramid.httpexceptions import HTTPFound | |
26 | from pyramid.renderers import render |
|
26 | from pyramid.renderers import render | |
27 | from pyramid.response import Response |
|
27 | from pyramid.response import Response | |
28 |
|
28 | |||
29 | from rhodecode.apps._base import BaseAppView |
|
29 | from rhodecode.apps._base import BaseAppView | |
30 | from rhodecode.authentication.base import get_authn_registry |
|
30 | from rhodecode.authentication.base import get_authn_registry | |
31 | from rhodecode.lib import helpers as h |
|
31 | from rhodecode.lib import helpers as h | |
32 | from rhodecode.lib.auth import ( |
|
32 | from rhodecode.lib.auth import ( | |
33 | LoginRequired, HasPermissionAllDecorator, CSRFRequired) |
|
33 | LoginRequired, HasPermissionAllDecorator, CSRFRequired) | |
34 | from rhodecode.model.forms import AuthSettingsForm |
|
34 | from rhodecode.model.forms import AuthSettingsForm | |
35 | from rhodecode.model.meta import Session |
|
35 | from rhodecode.model.meta import Session | |
36 | from rhodecode.model.settings import SettingsModel |
|
36 | from rhodecode.model.settings import SettingsModel | |
37 |
|
37 | |||
38 | log = logging.getLogger(__name__) |
|
38 | log = logging.getLogger(__name__) | |
39 |
|
39 | |||
40 |
|
40 | |||
41 | class AuthnPluginViewBase(BaseAppView): |
|
41 | class AuthnPluginViewBase(BaseAppView): | |
42 |
|
42 | |||
43 | def load_default_context(self): |
|
43 | def load_default_context(self): | |
44 | c = self._get_local_tmpl_context() |
|
44 | c = self._get_local_tmpl_context() | |
45 | self.plugin = self.context.plugin |
|
45 | self.plugin = self.context.plugin | |
46 | return c |
|
46 | return c | |
47 |
|
47 | |||
48 | @LoginRequired() |
|
48 | @LoginRequired() | |
49 | @HasPermissionAllDecorator('hg.admin') |
|
49 | @HasPermissionAllDecorator('hg.admin') | |
50 | def settings_get(self, defaults=None, errors=None): |
|
50 | def settings_get(self, defaults=None, errors=None): | |
51 | """ |
|
51 | """ | |
52 | View that displays the plugin settings as a form. |
|
52 | View that displays the plugin settings as a form. | |
53 | """ |
|
53 | """ | |
54 | c = self.load_default_context() |
|
54 | c = self.load_default_context() | |
55 | defaults = defaults or {} |
|
55 | defaults = defaults or {} | |
56 | errors = errors or {} |
|
56 | errors = errors or {} | |
57 | schema = self.plugin.get_settings_schema() |
|
57 | schema = self.plugin.get_settings_schema() | |
58 |
|
58 | |||
59 | # Compute default values for the form. Priority is: |
|
59 | # Compute default values for the form. Priority is: | |
60 | # 1. Passed to this method 2. DB value 3. Schema default |
|
60 | # 1. Passed to this method 2. DB value 3. Schema default | |
61 | for node in schema: |
|
61 | for node in schema: | |
62 | if node.name not in defaults: |
|
62 | if node.name not in defaults: | |
63 | defaults[node.name] = self.plugin.get_setting_by_name( |
|
63 | defaults[node.name] = self.plugin.get_setting_by_name( | |
64 | node.name, node.default) |
|
64 | node.name, node.default) | |
65 |
|
65 | |||
66 | template_context = { |
|
66 | template_context = { | |
67 | 'defaults': defaults, |
|
67 | 'defaults': defaults, | |
68 | 'errors': errors, |
|
68 | 'errors': errors, | |
69 | 'plugin': self.context.plugin, |
|
69 | 'plugin': self.context.plugin, | |
70 | 'resource': self.context, |
|
70 | 'resource': self.context, | |
71 | } |
|
71 | } | |
72 |
|
72 | |||
73 | return self._get_template_context(c, **template_context) |
|
73 | return self._get_template_context(c, **template_context) | |
74 |
|
74 | |||
75 | @LoginRequired() |
|
75 | @LoginRequired() | |
76 | @HasPermissionAllDecorator('hg.admin') |
|
76 | @HasPermissionAllDecorator('hg.admin') | |
77 | @CSRFRequired() |
|
77 | @CSRFRequired() | |
78 | def settings_post(self): |
|
78 | def settings_post(self): | |
79 | """ |
|
79 | """ | |
80 | View that validates and stores the plugin settings. |
|
80 | View that validates and stores the plugin settings. | |
81 | """ |
|
81 | """ | |
82 | _ = self.request.translate |
|
82 | _ = self.request.translate | |
83 | self.load_default_context() |
|
83 | self.load_default_context() | |
84 | schema = self.plugin.get_settings_schema() |
|
84 | schema = self.plugin.get_settings_schema() | |
85 | data = self.request.params |
|
85 | data = self.request.params | |
86 |
|
86 | |||
87 | try: |
|
87 | try: | |
88 | valid_data = schema.deserialize(data) |
|
88 | valid_data = schema.deserialize(data) | |
89 | except colander.Invalid as e: |
|
89 | except colander.Invalid as e: | |
90 | # Display error message and display form again. |
|
90 | # Display error message and display form again. | |
91 | h.flash( |
|
91 | h.flash( | |
92 | _('Errors exist when saving plugin settings. ' |
|
92 | _('Errors exist when saving plugin settings. ' | |
93 | 'Please check the form inputs.'), |
|
93 | 'Please check the form inputs.'), | |
94 | category='error') |
|
94 | category='error') | |
95 | defaults = {key: data[key] for key in data if key in schema} |
|
95 | defaults = {key: data[key] for key in data if key in schema} | |
96 | return self.settings_get(errors=e.asdict(), defaults=defaults) |
|
96 | return self.settings_get(errors=e.asdict(), defaults=defaults) | |
97 |
|
97 | |||
98 | # Store validated data. |
|
98 | # Store validated data. | |
99 | for name, value in valid_data.items(): |
|
99 | for name, value in valid_data.items(): | |
100 | self.plugin.create_or_update_setting(name, value) |
|
100 | self.plugin.create_or_update_setting(name, value) | |
101 | Session().commit() |
|
101 | Session().commit() | |
|
102 | SettingsModel().invalidate_settings_cache() | |||
102 |
|
103 | |||
103 | # Display success message and redirect. |
|
104 | # Display success message and redirect. | |
104 | h.flash(_('Auth settings updated successfully.'), category='success') |
|
105 | h.flash(_('Auth settings updated successfully.'), category='success') | |
105 | redirect_to = self.request.resource_path( |
|
106 | redirect_to = self.request.resource_path(self.context, route_name='auth_home') | |
106 | self.context, route_name='auth_home') |
|
107 | ||
107 | return HTTPFound(redirect_to) |
|
108 | return HTTPFound(redirect_to) | |
108 |
|
109 | |||
109 |
|
110 | |||
110 | class AuthSettingsView(BaseAppView): |
|
111 | class AuthSettingsView(BaseAppView): | |
111 | def load_default_context(self): |
|
112 | def load_default_context(self): | |
112 | c = self._get_local_tmpl_context() |
|
113 | c = self._get_local_tmpl_context() | |
113 | return c |
|
114 | return c | |
114 |
|
115 | |||
115 | @LoginRequired() |
|
116 | @LoginRequired() | |
116 | @HasPermissionAllDecorator('hg.admin') |
|
117 | @HasPermissionAllDecorator('hg.admin') | |
117 | def index(self, defaults=None, errors=None, prefix_error=False): |
|
118 | def index(self, defaults=None, errors=None, prefix_error=False): | |
118 | c = self.load_default_context() |
|
119 | c = self.load_default_context() | |
119 |
|
120 | |||
120 | defaults = defaults or {} |
|
121 | defaults = defaults or {} | |
121 | authn_registry = get_authn_registry(self.request.registry) |
|
122 | authn_registry = get_authn_registry(self.request.registry) | |
122 | enabled_plugins = SettingsModel().get_auth_plugins() |
|
123 | enabled_plugins = SettingsModel().get_auth_plugins() | |
123 |
|
124 | |||
124 | # Create template context and render it. |
|
125 | # Create template context and render it. | |
125 | template_context = { |
|
126 | template_context = { | |
126 | 'resource': self.context, |
|
127 | 'resource': self.context, | |
127 | 'available_plugins': authn_registry.get_plugins(), |
|
128 | 'available_plugins': authn_registry.get_plugins(), | |
128 | 'enabled_plugins': enabled_plugins, |
|
129 | 'enabled_plugins': enabled_plugins, | |
129 | } |
|
130 | } | |
130 | html = render('rhodecode:templates/admin/auth/auth_settings.mako', |
|
131 | html = render('rhodecode:templates/admin/auth/auth_settings.mako', | |
131 | self._get_template_context(c, **template_context), |
|
132 | self._get_template_context(c, **template_context), | |
132 | self.request) |
|
133 | self.request) | |
133 |
|
134 | |||
134 | # Create form default values and fill the form. |
|
135 | # Create form default values and fill the form. | |
135 | form_defaults = { |
|
136 | form_defaults = { | |
136 | 'auth_plugins': ',\n'.join(enabled_plugins) |
|
137 | 'auth_plugins': ',\n'.join(enabled_plugins) | |
137 | } |
|
138 | } | |
138 | form_defaults.update(defaults) |
|
139 | form_defaults.update(defaults) | |
139 | html = formencode.htmlfill.render( |
|
140 | html = formencode.htmlfill.render( | |
140 | html, |
|
141 | html, | |
141 | defaults=form_defaults, |
|
142 | defaults=form_defaults, | |
142 | errors=errors, |
|
143 | errors=errors, | |
143 | prefix_error=prefix_error, |
|
144 | prefix_error=prefix_error, | |
144 | encoding="UTF-8", |
|
145 | encoding="UTF-8", | |
145 | force_defaults=False) |
|
146 | force_defaults=False) | |
146 |
|
147 | |||
147 | return Response(html) |
|
148 | return Response(html) | |
148 |
|
149 | |||
149 | @LoginRequired() |
|
150 | @LoginRequired() | |
150 | @HasPermissionAllDecorator('hg.admin') |
|
151 | @HasPermissionAllDecorator('hg.admin') | |
151 | @CSRFRequired() |
|
152 | @CSRFRequired() | |
152 | def auth_settings(self): |
|
153 | def auth_settings(self): | |
153 | _ = self.request.translate |
|
154 | _ = self.request.translate | |
154 | try: |
|
155 | try: | |
155 | form = AuthSettingsForm(self.request.translate)() |
|
156 | form = AuthSettingsForm(self.request.translate)() | |
156 | form_result = form.to_python(self.request.POST) |
|
157 | form_result = form.to_python(self.request.POST) | |
157 | plugins = ','.join(form_result['auth_plugins']) |
|
158 | plugins = ','.join(form_result['auth_plugins']) | |
158 | setting = SettingsModel().create_or_update_setting( |
|
159 | setting = SettingsModel().create_or_update_setting( | |
159 | 'auth_plugins', plugins) |
|
160 | 'auth_plugins', plugins) | |
160 | Session().add(setting) |
|
161 | Session().add(setting) | |
161 | Session().commit() |
|
162 | Session().commit() | |
162 |
|
163 | SettingsModel().invalidate_settings_cache() | ||
163 | h.flash(_('Auth settings updated successfully.'), category='success') |
|
164 | h.flash(_('Auth settings updated successfully.'), category='success') | |
164 | except formencode.Invalid as errors: |
|
165 | except formencode.Invalid as errors: | |
165 | e = errors.error_dict or {} |
|
166 | e = errors.error_dict or {} | |
166 | h.flash(_('Errors exist when saving plugin setting. ' |
|
167 | h.flash(_('Errors exist when saving plugin setting. ' | |
167 | 'Please check the form inputs.'), category='error') |
|
168 | 'Please check the form inputs.'), category='error') | |
168 | return self.index( |
|
169 | return self.index( | |
169 | defaults=errors.value, |
|
170 | defaults=errors.value, | |
170 | errors=e, |
|
171 | errors=e, | |
171 | prefix_error=False) |
|
172 | prefix_error=False) | |
172 | except Exception: |
|
173 | except Exception: | |
173 | log.exception('Exception in auth_settings') |
|
174 | log.exception('Exception in auth_settings') | |
174 | h.flash(_('Error occurred during update of auth settings.'), |
|
175 | h.flash(_('Error occurred during update of auth settings.'), | |
175 | category='error') |
|
176 | category='error') | |
176 |
|
177 | |||
177 | redirect_to = self.request.resource_path( |
|
178 | redirect_to = self.request.resource_path(self.context, route_name='auth_home') | |
178 | self.context, route_name='auth_home') |
|
179 | ||
179 | return HTTPFound(redirect_to) |
|
180 | return HTTPFound(redirect_to) |
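
Both views above now pair the database commit with SettingsModel().invalidate_settings_cache(), so later requests rebuild the cached settings instead of serving stale values. Below is a small sketch of that write, commit, then invalidate ordering, assuming a hypothetical in-memory SettingsStore in place of SettingsModel.

    # Hypothetical store illustrating the write -> commit -> invalidate ordering.
    class SettingsStore(object):
        def __init__(self):
            self._db = {}       # persisted values
            self._cache = None  # cached snapshot of the settings

        def create_or_update_setting(self, name, value):
            self._db[name] = value

        def get_all_settings(self):
            if self._cache is None:
                self._cache = dict(self._db)   # "expensive" read, done once
            return self._cache

        def invalidate_settings_cache(self):
            self._cache = None

    store = SettingsStore()
    store.get_all_settings()                                   # warm the cache
    store.create_or_update_setting('auth_plugins', 'egg:rhodecode-enterprise-ce#rhodecode')
    # Session().commit() would run here in the real views.
    store.invalidate_settings_cache()                          # drop the stale snapshot
    assert 'auth_plugins' in store.get_all_settings()          # fresh read sees the change
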
@@ -1,615 +1,616 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | The base Controller API |
|
22 | The base Controller API | |
23 | Provides the BaseController class for subclassing and usage in different |
|
23 | Provides the BaseController class for subclassing and usage in different | |
24 | controllers. |
|
24 | controllers. | |
25 | """ |
|
25 | """ | |
26 |
|
26 | |||
27 | import logging |
|
27 | import logging | |
28 | import socket |
|
28 | import socket | |
29 |
|
29 | |||
30 | import markupsafe |
|
30 | import markupsafe | |
31 | import ipaddress |
|
31 | import ipaddress | |
32 |
|
32 | |||
33 | from paste.auth.basic import AuthBasicAuthenticator |
|
33 | from paste.auth.basic import AuthBasicAuthenticator | |
34 | from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception |
|
34 | from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception | |
35 | from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION |
|
35 | from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION | |
36 |
|
36 | |||
37 | import rhodecode |
|
37 | import rhodecode | |
38 | from rhodecode.apps._base import TemplateArgs |
|
38 | from rhodecode.apps._base import TemplateArgs | |
39 | from rhodecode.authentication.base import VCS_TYPE |
|
39 | from rhodecode.authentication.base import VCS_TYPE | |
40 | from rhodecode.lib import auth, utils2 |
|
40 | from rhodecode.lib import auth, utils2 | |
41 | from rhodecode.lib import helpers as h |
|
41 | from rhodecode.lib import helpers as h | |
42 | from rhodecode.lib.auth import AuthUser, CookieStoreWrapper |
|
42 | from rhodecode.lib.auth import AuthUser, CookieStoreWrapper | |
43 | from rhodecode.lib.exceptions import UserCreationError |
|
43 | from rhodecode.lib.exceptions import UserCreationError | |
44 | from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes) |
|
44 | from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes) | |
45 | from rhodecode.lib.utils2 import ( |
|
45 | from rhodecode.lib.utils2 import ( | |
46 | str2bool, safe_unicode, AttributeDict, safe_int, sha1, aslist, safe_str) |
|
46 | str2bool, safe_unicode, AttributeDict, safe_int, sha1, aslist, safe_str) | |
47 | from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark |
|
47 | from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark | |
48 | from rhodecode.model.notification import NotificationModel |
|
48 | from rhodecode.model.notification import NotificationModel | |
49 | from rhodecode.model.settings import VcsSettingsModel, SettingsModel |
|
49 | from rhodecode.model.settings import VcsSettingsModel, SettingsModel | |
50 |
|
50 | |||
51 | log = logging.getLogger(__name__) |
|
51 | log = logging.getLogger(__name__) | |
52 |
|
52 | |||
53 |
|
53 | |||
54 | def _filter_proxy(ip): |
|
54 | def _filter_proxy(ip): | |
55 | """ |
|
55 | """ | |
56 | Passed in IP addresses in HEADERS can be in a special format of multiple |
|
56 | Passed in IP addresses in HEADERS can be in a special format of multiple | |
57 | ips. Those comma separated IPs are passed from various proxies in the |
|
57 | ips. Those comma separated IPs are passed from various proxies in the | |
58 | chain of request processing, the left-most being the original client. |
|
58 | chain of request processing, the left-most being the original client. | |
59 | We only care about the first IP, which came from the original client. |
|
59 | We only care about the first IP, which came from the original client. | |
60 |
|
60 | |||
61 | :param ip: ip string from headers |
|
61 | :param ip: ip string from headers | |
62 | """ |
|
62 | """ | |
63 | if ',' in ip: |
|
63 | if ',' in ip: | |
64 | _ips = ip.split(',') |
|
64 | _ips = ip.split(',') | |
65 | _first_ip = _ips[0].strip() |
|
65 | _first_ip = _ips[0].strip() | |
66 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) |
|
66 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) | |
67 | return _first_ip |
|
67 | return _first_ip | |
68 | return ip |
|
68 | return ip | |
69 |
|
69 | |||
70 |
|
70 | |||
71 | def _filter_port(ip): |
|
71 | def _filter_port(ip): | |
72 | """ |
|
72 | """ | |
73 | Removes a port from an IP; there are 4 main cases to handle here. |
|
73 | Removes a port from an IP; there are 4 main cases to handle here. | |
74 | - ipv4 eg. 127.0.0.1 |
|
74 | - ipv4 eg. 127.0.0.1 | |
75 | - ipv6 eg. ::1 |
|
75 | - ipv6 eg. ::1 | |
76 | - ipv4+port eg. 127.0.0.1:8080 |
|
76 | - ipv4+port eg. 127.0.0.1:8080 | |
77 | - ipv6+port eg. [::1]:8080 |
|
77 | - ipv6+port eg. [::1]:8080 | |
78 |
|
78 | |||
79 | :param ip: |
|
79 | :param ip: | |
80 | """ |
|
80 | """ | |
81 | def is_ipv6(ip_addr): |
|
81 | def is_ipv6(ip_addr): | |
82 | if hasattr(socket, 'inet_pton'): |
|
82 | if hasattr(socket, 'inet_pton'): | |
83 | try: |
|
83 | try: | |
84 | socket.inet_pton(socket.AF_INET6, ip_addr) |
|
84 | socket.inet_pton(socket.AF_INET6, ip_addr) | |
85 | except socket.error: |
|
85 | except socket.error: | |
86 | return False |
|
86 | return False | |
87 | else: |
|
87 | else: | |
88 | # fallback to ipaddress |
|
88 | # fallback to ipaddress | |
89 | try: |
|
89 | try: | |
90 | ipaddress.IPv6Address(safe_unicode(ip_addr)) |
|
90 | ipaddress.IPv6Address(safe_unicode(ip_addr)) | |
91 | except Exception: |
|
91 | except Exception: | |
92 | return False |
|
92 | return False | |
93 | return True |
|
93 | return True | |
94 |
|
94 | |||
95 | if ':' not in ip: # must be ipv4 pure ip |
|
95 | if ':' not in ip: # must be ipv4 pure ip | |
96 | return ip |
|
96 | return ip | |
97 |
|
97 | |||
98 | if '[' in ip and ']' in ip: # ipv6 with port |
|
98 | if '[' in ip and ']' in ip: # ipv6 with port | |
99 | return ip.split(']')[0][1:].lower() |
|
99 | return ip.split(']')[0][1:].lower() | |
100 |
|
100 | |||
101 | # must be ipv6 or ipv4 with port |
|
101 | # must be ipv6 or ipv4 with port | |
102 | if is_ipv6(ip): |
|
102 | if is_ipv6(ip): | |
103 | return ip |
|
103 | return ip | |
104 | else: |
|
104 | else: | |
105 | ip, _port = ip.split(':')[:2] # means ipv4+port |
|
105 | ip, _port = ip.split(':')[:2] # means ipv4+port | |
106 | return ip |
|
106 | return ip | |
107 |
|
107 | |||
108 |
|
108 | |||
109 | def get_ip_addr(environ): |
|
109 | def get_ip_addr(environ): | |
110 | proxy_key = 'HTTP_X_REAL_IP' |
|
110 | proxy_key = 'HTTP_X_REAL_IP' | |
111 | proxy_key2 = 'HTTP_X_FORWARDED_FOR' |
|
111 | proxy_key2 = 'HTTP_X_FORWARDED_FOR' | |
112 | def_key = 'REMOTE_ADDR' |
|
112 | def_key = 'REMOTE_ADDR' | |
113 | _filters = lambda x: _filter_port(_filter_proxy(x)) |
|
113 | _filters = lambda x: _filter_port(_filter_proxy(x)) | |
114 |
|
114 | |||
115 | ip = environ.get(proxy_key) |
|
115 | ip = environ.get(proxy_key) | |
116 | if ip: |
|
116 | if ip: | |
117 | return _filters(ip) |
|
117 | return _filters(ip) | |
118 |
|
118 | |||
119 | ip = environ.get(proxy_key2) |
|
119 | ip = environ.get(proxy_key2) | |
120 | if ip: |
|
120 | if ip: | |
121 | return _filters(ip) |
|
121 | return _filters(ip) | |
122 |
|
122 | |||
123 | ip = environ.get(def_key, '0.0.0.0') |
|
123 | ip = environ.get(def_key, '0.0.0.0') | |
124 | return _filters(ip) |
|
124 | return _filters(ip) | |
125 |
|
125 | |||
126 |
|
126 | |||
127 | def get_server_ip_addr(environ, log_errors=True): |
|
127 | def get_server_ip_addr(environ, log_errors=True): | |
128 | hostname = environ.get('SERVER_NAME') |
|
128 | hostname = environ.get('SERVER_NAME') | |
129 | try: |
|
129 | try: | |
130 | return socket.gethostbyname(hostname) |
|
130 | return socket.gethostbyname(hostname) | |
131 | except Exception as e: |
|
131 | except Exception as e: | |
132 | if log_errors: |
|
132 | if log_errors: | |
133 | # in some cases this lookup is not possible, and we don't want to |
|
133 | # in some cases this lookup is not possible, and we don't want to | |
134 | # make it an exception in logs |
|
134 | # make it an exception in logs | |
135 | log.exception('Could not retrieve server ip address: %s', e) |
|
135 | log.exception('Could not retrieve server ip address: %s', e) | |
136 | return hostname |
|
136 | return hostname | |
137 |
|
137 | |||
138 |
|
138 | |||
139 | def get_server_port(environ): |
|
139 | def get_server_port(environ): | |
140 | return environ.get('SERVER_PORT') |
|
140 | return environ.get('SERVER_PORT') | |
141 |
|
141 | |||
142 |
|
142 | |||
143 | def get_access_path(environ): |
|
143 | def get_access_path(environ): | |
144 | path = environ.get('PATH_INFO') |
|
144 | path = environ.get('PATH_INFO') | |
145 | org_req = environ.get('pylons.original_request') |
|
145 | org_req = environ.get('pylons.original_request') | |
146 | if org_req: |
|
146 | if org_req: | |
147 | path = org_req.environ.get('PATH_INFO') |
|
147 | path = org_req.environ.get('PATH_INFO') | |
148 | return path |
|
148 | return path | |
149 |
|
149 | |||
150 |
|
150 | |||
151 | def get_user_agent(environ): |
|
151 | def get_user_agent(environ): | |
152 | return environ.get('HTTP_USER_AGENT') |
|
152 | return environ.get('HTTP_USER_AGENT') | |
153 |
|
153 | |||
154 |
|
154 | |||
155 | def vcs_operation_context( |
|
155 | def vcs_operation_context( | |
156 | environ, repo_name, username, action, scm, check_locking=True, |
|
156 | environ, repo_name, username, action, scm, check_locking=True, | |
157 | is_shadow_repo=False, check_branch_perms=False, detect_force_push=False): |
|
157 | is_shadow_repo=False, check_branch_perms=False, detect_force_push=False): | |
158 | """ |
|
158 | """ | |
159 | Generate the context for a vcs operation, e.g. push or pull. |
|
159 | Generate the context for a vcs operation, e.g. push or pull. | |
160 |
|
160 | |||
161 | This context is passed over the layers so that hooks triggered by the |
|
161 | This context is passed over the layers so that hooks triggered by the | |
162 | vcs operation know details like the user, the user's IP address etc. |
|
162 | vcs operation know details like the user, the user's IP address etc. | |
163 |
|
163 | |||
164 | :param check_locking: Allows to switch off the computation of the locking |
|
164 | :param check_locking: Allows to switch off the computation of the locking | |
165 | data. This serves mainly the need of the simplevcs middleware to be |
|
165 | data. This serves mainly the need of the simplevcs middleware to be | |
166 | able to disable this for certain operations. |
|
166 | able to disable this for certain operations. | |
167 |
|
167 | |||
168 | """ |
|
168 | """ | |
169 | # Tri-state value: False: unlock, None: nothing, True: lock |
|
169 | # Tri-state value: False: unlock, None: nothing, True: lock | |
170 | make_lock = None |
|
170 | make_lock = None | |
171 | locked_by = [None, None, None] |
|
171 | locked_by = [None, None, None] | |
172 | is_anonymous = username == User.DEFAULT_USER |
|
172 | is_anonymous = username == User.DEFAULT_USER | |
173 | user = User.get_by_username(username) |
|
173 | user = User.get_by_username(username) | |
174 | if not is_anonymous and check_locking: |
|
174 | if not is_anonymous and check_locking: | |
175 | log.debug('Checking locking on repository "%s"', repo_name) |
|
175 | log.debug('Checking locking on repository "%s"', repo_name) | |
176 | repo = Repository.get_by_repo_name(repo_name) |
|
176 | repo = Repository.get_by_repo_name(repo_name) | |
177 | make_lock, __, locked_by = repo.get_locking_state( |
|
177 | make_lock, __, locked_by = repo.get_locking_state( | |
178 | action, user.user_id) |
|
178 | action, user.user_id) | |
179 | user_id = user.user_id |
|
179 | user_id = user.user_id | |
180 | settings_model = VcsSettingsModel(repo=repo_name) |
|
180 | settings_model = VcsSettingsModel(repo=repo_name) | |
181 | ui_settings = settings_model.get_ui_settings() |
|
181 | ui_settings = settings_model.get_ui_settings() | |
182 |
|
182 | |||
183 | # NOTE(marcink): This should be also in sync with |
|
183 | # NOTE(marcink): This should be also in sync with | |
184 | # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data |
|
184 | # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data | |
185 | store = [x for x in ui_settings if x.key == '/'] |
|
185 | store = [x for x in ui_settings if x.key == '/'] | |
186 | repo_store = '' |
|
186 | repo_store = '' | |
187 | if store: |
|
187 | if store: | |
188 | repo_store = store[0].value |
|
188 | repo_store = store[0].value | |
189 |
|
189 | |||
190 | scm_data = { |
|
190 | scm_data = { | |
191 | 'ip': get_ip_addr(environ), |
|
191 | 'ip': get_ip_addr(environ), | |
192 | 'username': username, |
|
192 | 'username': username, | |
193 | 'user_id': user_id, |
|
193 | 'user_id': user_id, | |
194 | 'action': action, |
|
194 | 'action': action, | |
195 | 'repository': repo_name, |
|
195 | 'repository': repo_name, | |
196 | 'scm': scm, |
|
196 | 'scm': scm, | |
197 | 'config': rhodecode.CONFIG['__file__'], |
|
197 | 'config': rhodecode.CONFIG['__file__'], | |
198 | 'repo_store': repo_store, |
|
198 | 'repo_store': repo_store, | |
199 | 'make_lock': make_lock, |
|
199 | 'make_lock': make_lock, | |
200 | 'locked_by': locked_by, |
|
200 | 'locked_by': locked_by, | |
201 | 'server_url': utils2.get_server_url(environ), |
|
201 | 'server_url': utils2.get_server_url(environ), | |
202 | 'user_agent': get_user_agent(environ), |
|
202 | 'user_agent': get_user_agent(environ), | |
203 | 'hooks': get_enabled_hook_classes(ui_settings), |
|
203 | 'hooks': get_enabled_hook_classes(ui_settings), | |
204 | 'is_shadow_repo': is_shadow_repo, |
|
204 | 'is_shadow_repo': is_shadow_repo, | |
205 | 'detect_force_push': detect_force_push, |
|
205 | 'detect_force_push': detect_force_push, | |
206 | 'check_branch_perms': check_branch_perms, |
|
206 | 'check_branch_perms': check_branch_perms, | |
207 | } |
|
207 | } | |
208 | return scm_data |
|
208 | return scm_data | |
209 |
|
209 | |||
210 |
|
210 | |||
211 | class BasicAuth(AuthBasicAuthenticator): |
|
211 | class BasicAuth(AuthBasicAuthenticator): | |
212 |
|
212 | |||
213 | def __init__(self, realm, authfunc, registry, auth_http_code=None, |
|
213 | def __init__(self, realm, authfunc, registry, auth_http_code=None, | |
214 | initial_call_detection=False, acl_repo_name=None): |
|
214 | initial_call_detection=False, acl_repo_name=None, rc_realm=''): | |
215 | self.realm = realm |
|
215 | self.realm = realm | |
|
216 | self.rc_realm = rc_realm | |||
216 | self.initial_call = initial_call_detection |
|
217 | self.initial_call = initial_call_detection | |
217 | self.authfunc = authfunc |
|
218 | self.authfunc = authfunc | |
218 | self.registry = registry |
|
219 | self.registry = registry | |
219 | self.acl_repo_name = acl_repo_name |
|
220 | self.acl_repo_name = acl_repo_name | |
220 | self._rc_auth_http_code = auth_http_code |
|
221 | self._rc_auth_http_code = auth_http_code | |
221 |
|
222 | |||
222 | def _get_response_from_code(self, http_code): |
|
223 | def _get_response_from_code(self, http_code): | |
223 | try: |
|
224 | try: | |
224 | return get_exception(safe_int(http_code)) |
|
225 | return get_exception(safe_int(http_code)) | |
225 | except Exception: |
|
226 | except Exception: | |
226 | log.exception('Failed to fetch response for code %s', http_code) |
|
227 | log.exception('Failed to fetch response for code %s', http_code) | |
227 | return HTTPForbidden |
|
228 | return HTTPForbidden | |
228 |
|
229 | |||
229 | def get_rc_realm(self): |
|
230 | def get_rc_realm(self): | |
230 | return safe_str(self.r |
|
231 | return safe_str(self.rc_realm) | |
231 |
|
232 | |||
232 | def build_authentication(self): |
|
233 | def build_authentication(self): | |
233 | head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm) |
|
234 | head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm) | |
234 | if self._rc_auth_http_code and not self.initial_call: |
|
235 | if self._rc_auth_http_code and not self.initial_call: | |
235 | # return alternative HTTP code if alternative http return code |
|
236 | # return alternative HTTP code if alternative http return code | |
236 | # is specified in RhodeCode config, but ONLY if it's not the |
|
237 | # is specified in RhodeCode config, but ONLY if it's not the | |
237 | # FIRST call |
|
238 | # FIRST call | |
238 | custom_response_klass = self._get_response_from_code( |
|
239 | custom_response_klass = self._get_response_from_code( | |
239 | self._rc_auth_http_code) |
|
240 | self._rc_auth_http_code) | |
240 | return custom_response_klass(headers=head) |
|
241 | return custom_response_klass(headers=head) | |
241 | return HTTPUnauthorized(headers=head) |
|
242 | return HTTPUnauthorized(headers=head) | |
242 |
|
243 | |||
243 | def authenticate(self, environ): |
|
244 | def authenticate(self, environ): | |
244 | authorization = AUTHORIZATION(environ) |
|
245 | authorization = AUTHORIZATION(environ) | |
245 | if not authorization: |
|
246 | if not authorization: | |
246 | return self.build_authentication() |
|
247 | return self.build_authentication() | |
247 | (authmeth, auth) = authorization.split(' ', 1) |
|
248 | (authmeth, auth) = authorization.split(' ', 1) | |
248 | if 'basic' != authmeth.lower(): |
|
249 | if 'basic' != authmeth.lower(): | |
249 | return self.build_authentication() |
|
250 | return self.build_authentication() | |
250 | auth = auth.strip().decode('base64') |
|
251 | auth = auth.strip().decode('base64') | |
251 | _parts = auth.split(':', 1) |
|
252 | _parts = auth.split(':', 1) | |
252 | if len(_parts) == 2: |
|
253 | if len(_parts) == 2: | |
253 | username, password = _parts |
|
254 | username, password = _parts | |
254 | auth_data = self.authfunc( |
|
255 | auth_data = self.authfunc( | |
255 | username, password, environ, VCS_TYPE, |
|
256 | username, password, environ, VCS_TYPE, | |
256 | registry=self.registry, acl_repo_name=self.acl_repo_name) |
|
257 | registry=self.registry, acl_repo_name=self.acl_repo_name) | |
257 | if auth_data: |
|
258 | if auth_data: | |
258 | return {'username': username, 'auth_data': auth_data} |
|
259 | return {'username': username, 'auth_data': auth_data} | |
259 | if username and password: |
|
260 | if username and password: | |
260 | # we mark that we actually executed authentication once, at |
|
261 | # we mark that we actually executed authentication once, at | |
261 | # that point we can use the alternative auth code |
|
262 | # that point we can use the alternative auth code | |
262 | self.initial_call = False |
|
263 | self.initial_call = False | |
263 |
|
264 | |||
264 | return self.build_authentication() |
|
265 | return self.build_authentication() | |
265 |
|
266 | |||
266 | __call__ = authenticate |
|
267 | __call__ = authenticate | |
267 |
|
268 | |||
268 |
|
269 | |||
269 | def calculate_version_hash(config): |
|
270 | def calculate_version_hash(config): | |
270 | return sha1( |
|
271 | return sha1( | |
271 | config.get('beaker.session.secret', '') + |
|
272 | config.get('beaker.session.secret', '') + | |
272 | rhodecode.__version__)[:8] |
|
273 | rhodecode.__version__)[:8] | |
273 |
|
274 | |||
274 |
|
275 | |||
275 | def get_current_lang(request): |
|
276 | def get_current_lang(request): | |
276 | # NOTE(marcink): remove after pyramid move |
|
277 | # NOTE(marcink): remove after pyramid move | |
277 | try: |
|
278 | try: | |
278 | return translation.get_lang()[0] |
|
279 | return translation.get_lang()[0] | |
279 | except: |
|
280 | except: | |
280 | pass |
|
281 | pass | |
281 |
|
282 | |||
282 | return getattr(request, '_LOCALE_', request.locale_name) |
|
283 | return getattr(request, '_LOCALE_', request.locale_name) | |
283 |
|
284 | |||
284 |
|
285 | |||
285 | def attach_context_attributes(context, request, user_id=None): |
|
286 | def attach_context_attributes(context, request, user_id=None): | |
286 | """ |
|
287 | """ | |
287 | Attach variables into template context called `c`. |
|
288 | Attach variables into template context called `c`. | |
288 | """ |
|
289 | """ | |
289 | config = request.registry.settings |
|
290 | config = request.registry.settings | |
290 |
|
291 | |||
291 | rc_config = SettingsModel().get_all_settings(cache=True, from_request=False) |
|
292 | rc_config = SettingsModel().get_all_settings(cache=True, from_request=False) | |
292 | context.rc_config = rc_config |
|
293 | context.rc_config = rc_config | |
293 | context.rhodecode_version = rhodecode.__version__ |
|
294 | context.rhodecode_version = rhodecode.__version__ | |
294 | context.rhodecode_edition = config.get('rhodecode.edition') |
|
295 | context.rhodecode_edition = config.get('rhodecode.edition') | |
295 | # unique secret + version does not leak the version but keeps consistency |
|
296 | # unique secret + version does not leak the version but keeps consistency | |
296 | context.rhodecode_version_hash = calculate_version_hash(config) |
|
297 | context.rhodecode_version_hash = calculate_version_hash(config) | |
297 |
|
298 | |||
298 | # Default language set for the incoming request |
|
299 | # Default language set for the incoming request | |
299 | context.language = get_current_lang(request) |
|
300 | context.language = get_current_lang(request) | |
300 |
|
301 | |||
301 | # Visual options |
|
302 | # Visual options | |
302 | context.visual = AttributeDict({}) |
|
303 | context.visual = AttributeDict({}) | |
303 |
|
304 | |||
304 | # DB stored Visual Items |
|
305 | # DB stored Visual Items | |
305 | context.visual.show_public_icon = str2bool( |
|
306 | context.visual.show_public_icon = str2bool( | |
306 | rc_config.get('rhodecode_show_public_icon')) |
|
307 | rc_config.get('rhodecode_show_public_icon')) | |
307 | context.visual.show_private_icon = str2bool( |
|
308 | context.visual.show_private_icon = str2bool( | |
308 | rc_config.get('rhodecode_show_private_icon')) |
|
309 | rc_config.get('rhodecode_show_private_icon')) | |
309 | context.visual.stylify_metatags = str2bool( |
|
310 | context.visual.stylify_metatags = str2bool( | |
310 | rc_config.get('rhodecode_stylify_metatags')) |
|
311 | rc_config.get('rhodecode_stylify_metatags')) | |
311 | context.visual.dashboard_items = safe_int( |
|
312 | context.visual.dashboard_items = safe_int( | |
312 | rc_config.get('rhodecode_dashboard_items', 100)) |
|
313 | rc_config.get('rhodecode_dashboard_items', 100)) | |
313 | context.visual.admin_grid_items = safe_int( |
|
314 | context.visual.admin_grid_items = safe_int( | |
314 | rc_config.get('rhodecode_admin_grid_items', 100)) |
|
315 | rc_config.get('rhodecode_admin_grid_items', 100)) | |
315 | context.visual.show_revision_number = str2bool( |
|
316 | context.visual.show_revision_number = str2bool( | |
316 | rc_config.get('rhodecode_show_revision_number', True)) |
|
317 | rc_config.get('rhodecode_show_revision_number', True)) | |
317 | context.visual.show_sha_length = safe_int( |
|
318 | context.visual.show_sha_length = safe_int( | |
318 | rc_config.get('rhodecode_show_sha_length', 100)) |
|
319 | rc_config.get('rhodecode_show_sha_length', 100)) | |
319 | context.visual.repository_fields = str2bool( |
|
320 | context.visual.repository_fields = str2bool( | |
320 | rc_config.get('rhodecode_repository_fields')) |
|
321 | rc_config.get('rhodecode_repository_fields')) | |
321 | context.visual.show_version = str2bool( |
|
322 | context.visual.show_version = str2bool( | |
322 | rc_config.get('rhodecode_show_version')) |
|
323 | rc_config.get('rhodecode_show_version')) | |
323 | context.visual.use_gravatar = str2bool( |
|
324 | context.visual.use_gravatar = str2bool( | |
324 | rc_config.get('rhodecode_use_gravatar')) |
|
325 | rc_config.get('rhodecode_use_gravatar')) | |
325 | context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url') |
|
326 | context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url') | |
326 | context.visual.default_renderer = rc_config.get( |
|
327 | context.visual.default_renderer = rc_config.get( | |
327 | 'rhodecode_markup_renderer', 'rst') |
|
328 | 'rhodecode_markup_renderer', 'rst') | |
328 | context.visual.comment_types = ChangesetComment.COMMENT_TYPES |
|
329 | context.visual.comment_types = ChangesetComment.COMMENT_TYPES | |
329 | context.visual.rhodecode_support_url = \ |
|
330 | context.visual.rhodecode_support_url = \ | |
330 | rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support') |
|
331 | rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support') | |
331 |
|
332 | |||
332 | context.visual.affected_files_cut_off = 60 |
|
333 | context.visual.affected_files_cut_off = 60 | |
333 |
|
334 | |||
334 | context.pre_code = rc_config.get('rhodecode_pre_code') |
|
335 | context.pre_code = rc_config.get('rhodecode_pre_code') | |
335 | context.post_code = rc_config.get('rhodecode_post_code') |
|
336 | context.post_code = rc_config.get('rhodecode_post_code') | |
336 | context.rhodecode_name = rc_config.get('rhodecode_title') |
|
337 | context.rhodecode_name = rc_config.get('rhodecode_title') | |
337 | context.default_encodings = aslist(config.get('default_encoding'), sep=',') |
|
338 | context.default_encodings = aslist(config.get('default_encoding'), sep=',') | |
338 | # if we have specified default_encoding in the request, it has more |
|
339 | # if we have specified default_encoding in the request, it has more | |
339 | # priority |
|
340 | # priority | |
340 | if request.GET.get('default_encoding'): |
|
341 | if request.GET.get('default_encoding'): | |
341 | context.default_encodings.insert(0, request.GET.get('default_encoding')) |
|
342 | context.default_encodings.insert(0, request.GET.get('default_encoding')) | |
342 | context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl') |
|
343 | context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl') | |
343 | context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl') |
|
344 | context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl') | |
344 |
|
345 | |||
345 | # INI stored |
|
346 | # INI stored | |
346 | context.labs_active = str2bool( |
|
347 | context.labs_active = str2bool( | |
347 | config.get('labs_settings_active', 'false')) |
|
348 | config.get('labs_settings_active', 'false')) | |
348 | context.ssh_enabled = str2bool( |
|
349 | context.ssh_enabled = str2bool( | |
349 | config.get('ssh.generate_authorized_keyfile', 'false')) |
|
350 | config.get('ssh.generate_authorized_keyfile', 'false')) | |
350 | context.ssh_key_generator_enabled = str2bool( |
|
351 | context.ssh_key_generator_enabled = str2bool( | |
351 | config.get('ssh.enable_ui_key_generator', 'true')) |
|
352 | config.get('ssh.enable_ui_key_generator', 'true')) | |
352 |
|
353 | |||
353 | context.visual.allow_repo_location_change = str2bool( |
|
354 | context.visual.allow_repo_location_change = str2bool( | |
354 | config.get('allow_repo_location_change', True)) |
|
355 | config.get('allow_repo_location_change', True)) | |
355 | context.visual.allow_custom_hooks_settings = str2bool( |
|
356 | context.visual.allow_custom_hooks_settings = str2bool( | |
356 | config.get('allow_custom_hooks_settings', True)) |
|
357 | config.get('allow_custom_hooks_settings', True)) | |
357 | context.debug_style = str2bool(config.get('debug_style', False)) |
|
358 | context.debug_style = str2bool(config.get('debug_style', False)) | |
358 |
|
359 | |||
359 | context.rhodecode_instanceid = config.get('instance_id') |
|
360 | context.rhodecode_instanceid = config.get('instance_id') | |
360 |
|
361 | |||
361 | context.visual.cut_off_limit_diff = safe_int( |
|
362 | context.visual.cut_off_limit_diff = safe_int( | |
362 | config.get('cut_off_limit_diff')) |
|
363 | config.get('cut_off_limit_diff')) | |
363 | context.visual.cut_off_limit_file = safe_int( |
|
364 | context.visual.cut_off_limit_file = safe_int( | |
364 | config.get('cut_off_limit_file')) |
|
365 | config.get('cut_off_limit_file')) | |
365 |
|
366 | |||
366 | context.license = AttributeDict({}) |
|
367 | context.license = AttributeDict({}) | |
367 | context.license.hide_license_info = str2bool( |
|
368 | context.license.hide_license_info = str2bool( | |
368 | config.get('license.hide_license_info', False)) |
|
369 | config.get('license.hide_license_info', False)) | |
369 |
|
370 | |||
370 | # AppEnlight |
|
371 | # AppEnlight | |
371 | context.appenlight_enabled = str2bool(config.get('appenlight', 'false')) |
|
372 | context.appenlight_enabled = str2bool(config.get('appenlight', 'false')) | |
372 | context.appenlight_api_public_key = config.get( |
|
373 | context.appenlight_api_public_key = config.get( | |
373 | 'appenlight.api_public_key', '') |
|
374 | 'appenlight.api_public_key', '') | |
374 | context.appenlight_server_url = config.get('appenlight.server_url', '') |
|
375 | context.appenlight_server_url = config.get('appenlight.server_url', '') | |
375 |
|
376 | |||
376 | diffmode = { |
|
377 | diffmode = { | |
377 | "unified": "unified", |
|
378 | "unified": "unified", | |
378 | "sideside": "sideside" |
|
379 | "sideside": "sideside" | |
379 | }.get(request.GET.get('diffmode')) |
|
380 | }.get(request.GET.get('diffmode')) | |
380 |
|
381 | |||
381 | is_api = hasattr(request, 'rpc_user') |
|
382 | is_api = hasattr(request, 'rpc_user') | |
382 | session_attrs = { |
|
383 | session_attrs = { | |
383 | # defaults |
|
384 | # defaults | |
384 | "clone_url_format": "http", |
|
385 | "clone_url_format": "http", | |
385 | "diffmode": "sideside" |
|
386 | "diffmode": "sideside" | |
386 | } |
|
387 | } | |
387 |
|
388 | |||
388 | if not is_api: |
|
389 | if not is_api: | |
389 | # don't access pyramid session for API calls |
|
390 | # don't access pyramid session for API calls | |
390 | if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'): |
|
391 | if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'): | |
391 | request.session['rc_user_session_attr.diffmode'] = diffmode |
|
392 | request.session['rc_user_session_attr.diffmode'] = diffmode | |
392 |
|
393 | |||
393 | # session settings per user |
|
394 | # session settings per user | |
394 |
|
395 | |||
395 | for k, v in request.session.items(): |
|
396 | for k, v in request.session.items(): | |
396 | pref = 'rc_user_session_attr.' |
|
397 | pref = 'rc_user_session_attr.' | |
397 | if k and k.startswith(pref): |
|
398 | if k and k.startswith(pref): | |
398 | k = k[len(pref):] |
|
399 | k = k[len(pref):] | |
399 | session_attrs[k] = v |
|
400 | session_attrs[k] = v | |
400 |
|
401 | |||
401 | context.user_session_attrs = session_attrs |
|
402 | context.user_session_attrs = session_attrs | |
402 |
|
403 | |||
403 | # JS template context |
|
404 | # JS template context | |
404 | context.template_context = { |
|
405 | context.template_context = { | |
405 | 'repo_name': None, |
|
406 | 'repo_name': None, | |
406 | 'repo_type': None, |
|
407 | 'repo_type': None, | |
407 | 'repo_landing_commit': None, |
|
408 | 'repo_landing_commit': None, | |
408 | 'rhodecode_user': { |
|
409 | 'rhodecode_user': { | |
409 | 'username': None, |
|
410 | 'username': None, | |
410 | 'email': None, |
|
411 | 'email': None, | |
411 | 'notification_status': False |
|
412 | 'notification_status': False | |
412 | }, |
|
413 | }, | |
413 | 'session_attrs': session_attrs, |
|
414 | 'session_attrs': session_attrs, | |
414 | 'visual': { |
|
415 | 'visual': { | |
415 | 'default_renderer': None |
|
416 | 'default_renderer': None | |
416 | }, |
|
417 | }, | |
417 | 'commit_data': { |
|
418 | 'commit_data': { | |
418 | 'commit_id': None |
|
419 | 'commit_id': None | |
419 | }, |
|
420 | }, | |
420 | 'pull_request_data': {'pull_request_id': None}, |
|
421 | 'pull_request_data': {'pull_request_id': None}, | |
421 | 'timeago': { |
|
422 | 'timeago': { | |
422 | 'refresh_time': 120 * 1000, |
|
423 | 'refresh_time': 120 * 1000, | |
423 | 'cutoff_limit': 1000 * 60 * 60 * 24 * 7 |
|
424 | 'cutoff_limit': 1000 * 60 * 60 * 24 * 7 | |
424 | }, |
|
425 | }, | |
425 | 'pyramid_dispatch': { |
|
426 | 'pyramid_dispatch': { | |
426 |
|
427 | |||
427 | }, |
|
428 | }, | |
428 | 'extra': {'plugins': {}} |
|
429 | 'extra': {'plugins': {}} | |
429 | } |
|
430 | } | |
430 | # END CONFIG VARS |
|
431 | # END CONFIG VARS | |
431 | if is_api: |
|
432 | if is_api: | |
432 | csrf_token = None |
|
433 | csrf_token = None | |
433 | else: |
|
434 | else: | |
434 | csrf_token = auth.get_csrf_token(session=request.session) |
|
435 | csrf_token = auth.get_csrf_token(session=request.session) | |
435 |
|
436 | |||
436 | context.csrf_token = csrf_token |
|
437 | context.csrf_token = csrf_token | |
437 | context.backends = rhodecode.BACKENDS.keys() |
|
438 | context.backends = rhodecode.BACKENDS.keys() | |
438 | context.backends.sort() |
|
439 | context.backends.sort() | |
439 | unread_count = 0 |
|
440 | unread_count = 0 | |
440 | user_bookmark_list = [] |
|
441 | user_bookmark_list = [] | |
441 | if user_id: |
|
442 | if user_id: | |
442 | unread_count = NotificationModel().get_unread_cnt_for_user(user_id) |
|
443 | unread_count = NotificationModel().get_unread_cnt_for_user(user_id) | |
443 | user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id) |
|
444 | user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id) | |
444 | context.unread_notifications = unread_count |
|
445 | context.unread_notifications = unread_count | |
445 | context.bookmark_items = user_bookmark_list |
|
446 | context.bookmark_items = user_bookmark_list | |
446 |
|
447 | |||
447 | # web case |
|
448 | # web case | |
448 | if hasattr(request, 'user'): |
|
449 | if hasattr(request, 'user'): | |
449 | context.auth_user = request.user |
|
450 | context.auth_user = request.user | |
450 | context.rhodecode_user = request.user |
|
451 | context.rhodecode_user = request.user | |
451 |
|
452 | |||
452 | # api case |
|
453 | # api case | |
453 | if hasattr(request, 'rpc_user'): |
|
454 | if hasattr(request, 'rpc_user'): | |
454 | context.auth_user = request.rpc_user |
|
455 | context.auth_user = request.rpc_user | |
455 | context.rhodecode_user = request.rpc_user |
|
456 | context.rhodecode_user = request.rpc_user | |
456 |
|
457 | |||
457 | # attach the whole call context to the request |
|
458 | # attach the whole call context to the request | |
458 | request.call_context = context |
|
459 | request.call_context = context | |
459 |
|
460 | |||
460 |
|
461 | |||
461 | def get_auth_user(request): |
|
462 | def get_auth_user(request): | |
462 | environ = request.environ |
|
463 | environ = request.environ | |
463 | session = request.session |
|
464 | session = request.session | |
464 |
|
465 | |||
465 | ip_addr = get_ip_addr(environ) |
|
466 | ip_addr = get_ip_addr(environ) | |
466 |
|
467 | |||
467 | # make sure that we update permissions each time we call controller |
|
468 | # make sure that we update permissions each time we call controller | |
468 | _auth_token = (request.GET.get('auth_token', '') or request.GET.get('api_key', '')) |
|
469 | _auth_token = (request.GET.get('auth_token', '') or request.GET.get('api_key', '')) | |
469 | if not _auth_token and request.matchdict: |
|
470 | if not _auth_token and request.matchdict: | |
470 | url_auth_token = request.matchdict.get('_auth_token') |
|
471 | url_auth_token = request.matchdict.get('_auth_token') | |
471 | _auth_token = url_auth_token |
|
472 | _auth_token = url_auth_token | |
472 | if _auth_token: |
|
473 | if _auth_token: | |
473 | log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:]) |
|
474 | log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:]) | |
474 |
|
475 | |||
475 | if _auth_token: |
|
476 | if _auth_token: | |
476 | # when using API_KEY we assume user exists, and |
|
477 | # when using API_KEY we assume user exists, and | |
477 | # doesn't need auth based on cookies. |
|
478 | # doesn't need auth based on cookies. | |
478 | auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr) |
|
479 | auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr) | |
479 | authenticated = False |
|
480 | authenticated = False | |
480 | else: |
|
481 | else: | |
481 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) |
|
482 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) | |
482 | try: |
|
483 | try: | |
483 | auth_user = AuthUser(user_id=cookie_store.get('user_id', None), |
|
484 | auth_user = AuthUser(user_id=cookie_store.get('user_id', None), | |
484 | ip_addr=ip_addr) |
|
485 | ip_addr=ip_addr) | |
485 | except UserCreationError as e: |
|
486 | except UserCreationError as e: | |
486 | h.flash(e, 'error') |
|
487 | h.flash(e, 'error') | |
487 | # container auth or other auth functions that create users |
|
488 | # container auth or other auth functions that create users | |
488 | # on the fly can throw this exception signaling that there's |
|
489 | # on the fly can throw this exception signaling that there's | |
489 | # issue with user creation, explanation should be provided |
|
490 | # issue with user creation, explanation should be provided | |
490 | # in Exception itself. We then create a simple blank |
|
491 | # in Exception itself. We then create a simple blank | |
491 | # AuthUser |
|
492 | # AuthUser | |
492 | auth_user = AuthUser(ip_addr=ip_addr) |
|
493 | auth_user = AuthUser(ip_addr=ip_addr) | |
493 |
|
494 | |||
494 | # in case someone changes a password for user it triggers session |
|
495 | # in case someone changes a password for user it triggers session | |
495 | # flush and forces a re-login |
|
496 | # flush and forces a re-login | |
496 | if password_changed(auth_user, session): |
|
497 | if password_changed(auth_user, session): | |
497 | session.invalidate() |
|
498 | session.invalidate() | |
498 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) |
|
499 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) | |
499 | auth_user = AuthUser(ip_addr=ip_addr) |
|
500 | auth_user = AuthUser(ip_addr=ip_addr) | |
500 |
|
501 | |||
501 | authenticated = cookie_store.get('is_authenticated') |
|
502 | authenticated = cookie_store.get('is_authenticated') | |
502 |
|
503 | |||
503 | if not auth_user.is_authenticated and auth_user.is_user_object: |
|
504 | if not auth_user.is_authenticated and auth_user.is_user_object: | |
504 | # user is not authenticated and not empty |
|
505 | # user is not authenticated and not empty | |
505 | auth_user.set_authenticated(authenticated) |
|
506 | auth_user.set_authenticated(authenticated) | |
506 |
|
507 | |||
507 | return auth_user, _auth_token |
|
508 | return auth_user, _auth_token | |
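
A minimal sketch of the token-precedence logic above — explicit `auth_token`/`api_key` query parameters first, then a `_auth_token` value from the matched route — assuming a plain stand-in request object (the real Pyramid request, AuthUser and cookie handling are omitted):

    class FakeRequest(object):
        # hypothetical stand-in for the Pyramid request, illustration only
        def __init__(self, GET=None, matchdict=None):
            self.GET = GET or {}
            self.matchdict = matchdict or {}

    def pick_auth_token(request):
        # explicit query parameters win
        token = request.GET.get('auth_token', '') or request.GET.get('api_key', '')
        # fall back to a token embedded in the matched route
        if not token and request.matchdict:
            token = request.matchdict.get('_auth_token') or ''
        return token

    assert pick_auth_token(FakeRequest(GET={'auth_token': 'abcd'})) == 'abcd'
    assert pick_auth_token(FakeRequest(matchdict={'_auth_token': 'efgh'})) == 'efgh'
    assert pick_auth_token(FakeRequest()) == ''
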
508 |
|
509 | |||
509 |
|
510 | |||
510 | def h_filter(s): |
|
511 | def h_filter(s): | |
511 | """ |
|
512 | """ | |
512 | Custom filter for Mako templates. Mako uses `markupsafe.escape` by default; |
|
513 | Custom filter for Mako templates. Mako uses `markupsafe.escape` by default; | |
513 | we wrap this with additional functionality that converts None to empty |
|
514 | we wrap this with additional functionality that converts None to empty | |
514 | strings |
|
515 | strings | |
515 | """ |
|
516 | """ | |
516 | if s is None: |
|
517 | if s is None: | |
517 | return markupsafe.Markup() |
|
518 | return markupsafe.Markup() | |
518 | return markupsafe.escape(s) |
|
519 | return markupsafe.escape(s) | |
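
A quick standalone check of the filter's behaviour (markupsafe is the only dependency; `h_filter` is re-declared locally so the snippet runs on its own):

    import markupsafe

    def h_filter(s):
        # None becomes an empty Markup object instead of the literal string "None"
        if s is None:
            return markupsafe.Markup()
        return markupsafe.escape(s)

    print(h_filter(None))          # -> empty string
    print(h_filter('<b>hi</b>'))   # -> &lt;b&gt;hi&lt;/b&gt;
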
519 |
|
520 | |||
520 |
|
521 | |||
521 | def add_events_routes(config): |
|
522 | def add_events_routes(config): | |
522 | """ |
|
523 | """ | |
523 | Adds routing that can be used in events. Because some events are triggered |
|
524 | Adds routing that can be used in events. Because some events are triggered | |
524 | outside of the Pyramid context, we need to bootstrap the request with some |
|
525 | outside of the Pyramid context, we need to bootstrap the request with some | |
525 | routing registered |
|
526 | routing registered | |
526 | """ |
|
527 | """ | |
527 |
|
528 | |||
528 | from rhodecode.apps._base import ADMIN_PREFIX |
|
529 | from rhodecode.apps._base import ADMIN_PREFIX | |
529 |
|
530 | |||
530 | config.add_route(name='home', pattern='/') |
|
531 | config.add_route(name='home', pattern='/') | |
531 | config.add_route(name='main_page_repos_data', pattern='/_home_repos') |
|
532 | config.add_route(name='main_page_repos_data', pattern='/_home_repos') | |
532 | config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups') |
|
533 | config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups') | |
533 |
|
534 | |||
534 | config.add_route(name='login', pattern=ADMIN_PREFIX + '/login') |
|
535 | config.add_route(name='login', pattern=ADMIN_PREFIX + '/login') | |
535 | config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout') |
|
536 | config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout') | |
536 | config.add_route(name='repo_summary', pattern='/{repo_name}') |
|
537 | config.add_route(name='repo_summary', pattern='/{repo_name}') | |
537 | config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary') |
|
538 | config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary') | |
538 | config.add_route(name='repo_group_home', pattern='/{repo_group_name}') |
|
539 | config.add_route(name='repo_group_home', pattern='/{repo_group_name}') | |
539 |
|
540 | |||
540 | config.add_route(name='pullrequest_show', |
|
541 | config.add_route(name='pullrequest_show', | |
541 | pattern='/{repo_name}/pull-request/{pull_request_id}') |
|
542 | pattern='/{repo_name}/pull-request/{pull_request_id}') | |
542 | config.add_route(name='pull_requests_global', |
|
543 | config.add_route(name='pull_requests_global', | |
543 | pattern='/pull-request/{pull_request_id}') |
|
544 | pattern='/pull-request/{pull_request_id}') | |
544 |
|
545 | |||
545 | config.add_route(name='repo_commit', |
|
546 | config.add_route(name='repo_commit', | |
546 | pattern='/{repo_name}/changeset/{commit_id}') |
|
547 | pattern='/{repo_name}/changeset/{commit_id}') | |
547 | config.add_route(name='repo_files', |
|
548 | config.add_route(name='repo_files', | |
548 | pattern='/{repo_name}/files/{commit_id}/{f_path}') |
|
549 | pattern='/{repo_name}/files/{commit_id}/{f_path}') | |
549 |
|
550 | |||
550 | config.add_route(name='hovercard_user', |
|
551 | config.add_route(name='hovercard_user', | |
551 | pattern='/_hovercard/user/{user_id}') |
|
552 | pattern='/_hovercard/user/{user_id}') | |
552 |
|
553 | |||
553 | config.add_route(name='hovercard_user_group', |
|
554 | config.add_route(name='hovercard_user_group', | |
554 | pattern='/_hovercard/user_group/{user_group_id}') |
|
555 | pattern='/_hovercard/user_group/{user_group_id}') | |
555 |
|
556 | |||
556 | config.add_route(name='hovercard_pull_request', |
|
557 | config.add_route(name='hovercard_pull_request', | |
557 | pattern='/_hovercard/pull_request/{pull_request_id}') |
|
558 | pattern='/_hovercard/pull_request/{pull_request_id}') | |
558 |
|
559 | |||
559 | config.add_route(name='hovercard_repo_commit', |
|
560 | config.add_route(name='hovercard_repo_commit', | |
560 | pattern='/_hovercard/commit/{repo_name}/{commit_id}') |
|
561 | pattern='/_hovercard/commit/{repo_name}/{commit_id}') | |
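
For orientation, the registered patterns resolve with simple placeholder substitution; a trivial, purely illustrative check (Pyramid's own URL generator is what the application actually uses):

    pattern = '/{repo_name}/pull-request/{pull_request_id}'
    print(pattern.format(repo_name='MyRepo', pull_request_id=3))
    # -> /MyRepo/pull-request/3
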
561 |
|
562 | |||
562 |
|
563 | |||
563 | def bootstrap_config(request): |
|
564 | def bootstrap_config(request): | |
564 | import pyramid.testing |
|
565 | import pyramid.testing | |
565 | registry = pyramid.testing.Registry('RcTestRegistry') |
|
566 | registry = pyramid.testing.Registry('RcTestRegistry') | |
566 |
|
567 | |||
567 | config = pyramid.testing.setUp(registry=registry, request=request) |
|
568 | config = pyramid.testing.setUp(registry=registry, request=request) | |
568 |
|
569 | |||
569 | # allow pyramid lookup in testing |
|
570 | # allow pyramid lookup in testing | |
570 | config.include('pyramid_mako') |
|
571 | config.include('pyramid_mako') | |
571 | config.include('rhodecode.lib.rc_beaker') |
|
572 | config.include('rhodecode.lib.rc_beaker') | |
572 | config.include('rhodecode.lib.rc_cache') |
|
573 | config.include('rhodecode.lib.rc_cache') | |
573 |
|
574 | |||
574 | add_events_routes(config) |
|
575 | add_events_routes(config) | |
575 |
|
576 | |||
576 | return config |
|
577 | return config | |
577 |
|
578 | |||
578 |
|
579 | |||
579 | def bootstrap_request(**kwargs): |
|
580 | def bootstrap_request(**kwargs): | |
580 | import pyramid.testing |
|
581 | import pyramid.testing | |
581 |
|
582 | |||
582 | class TestRequest(pyramid.testing.DummyRequest): |
|
583 | class TestRequest(pyramid.testing.DummyRequest): | |
583 | application_url = kwargs.pop('application_url', 'http://example.com') |
|
584 | application_url = kwargs.pop('application_url', 'http://example.com') | |
584 | host = kwargs.pop('host', 'example.com:80') |
|
585 | host = kwargs.pop('host', 'example.com:80') | |
585 | domain = kwargs.pop('domain', 'example.com') |
|
586 | domain = kwargs.pop('domain', 'example.com') | |
586 |
|
587 | |||
587 | def translate(self, msg): |
|
588 | def translate(self, msg): | |
588 | return msg |
|
589 | return msg | |
589 |
|
590 | |||
590 | def plularize(self, singular, plural, n): |
|
591 | def plularize(self, singular, plural, n): | |
591 | return singular |
|
592 | return singular | |
592 |
|
593 | |||
593 | def get_partial_renderer(self, tmpl_name): |
|
594 | def get_partial_renderer(self, tmpl_name): | |
594 |
|
595 | |||
595 | from rhodecode.lib.partial_renderer import get_partial_renderer |
|
596 | from rhodecode.lib.partial_renderer import get_partial_renderer | |
596 | return get_partial_renderer(request=self, tmpl_name=tmpl_name) |
|
597 | return get_partial_renderer(request=self, tmpl_name=tmpl_name) | |
597 |
|
598 | |||
598 | _call_context = TemplateArgs() |
|
599 | _call_context = TemplateArgs() | |
599 | _call_context.visual = TemplateArgs() |
|
600 | _call_context.visual = TemplateArgs() | |
600 | _call_context.visual.show_sha_length = 12 |
|
601 | _call_context.visual.show_sha_length = 12 | |
601 | _call_context.visual.show_revision_number = True |
|
602 | _call_context.visual.show_revision_number = True | |
602 |
|
603 | |||
603 | @property |
|
604 | @property | |
604 | def call_context(self): |
|
605 | def call_context(self): | |
605 | return self._call_context |
|
606 | return self._call_context | |
606 |
|
607 | |||
607 | class TestDummySession(pyramid.testing.DummySession): |
|
608 | class TestDummySession(pyramid.testing.DummySession): | |
608 | def save(*arg, **kw): |
|
609 | def save(*arg, **kw): | |
609 | pass |
|
610 | pass | |
610 |
|
611 | |||
611 | request = TestRequest(**kwargs) |
|
612 | request = TestRequest(**kwargs) | |
612 | request.session = TestDummySession() |
|
613 | request.session = TestDummySession() | |
613 |
|
614 | |||
614 | return request |
|
615 | return request | |
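
A hedged usage sketch of the two helpers above, as used when URLs have to be generated outside a normal web request (for example in event payloads). That `route_path` is available on the dummy request and the exact output shown are assumptions:

    request = bootstrap_request(application_url='http://rhodecode.local')
    config = bootstrap_config(request)   # registers the event routes listed above

    url = request.route_path('repo_summary', repo_name='MyRepo')
    # expected to look like '/MyRepo'
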
615 |
|
616 |
@@ -1,678 +1,679 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2014-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2014-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | SimpleVCS middleware for handling protocol requests (push/clone etc.) |
|
22 | SimpleVCS middleware for handling protocol requests (push/clone etc.) | |
23 | It is implemented using a basic auth function |
|
23 | It is implemented using a basic auth function | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 | import os |
|
26 | import os | |
27 | import re |
|
27 | import re | |
28 | import logging |
|
28 | import logging | |
29 | import importlib |
|
29 | import importlib | |
30 | from functools import wraps |
|
30 | from functools import wraps | |
31 | from StringIO import StringIO |
|
31 | from StringIO import StringIO | |
32 | from lxml import etree |
|
32 | from lxml import etree | |
33 |
|
33 | |||
34 | import time |
|
34 | import time | |
35 | from paste.httpheaders import REMOTE_USER, AUTH_TYPE |
|
35 | from paste.httpheaders import REMOTE_USER, AUTH_TYPE | |
36 |
|
36 | |||
37 | from pyramid.httpexceptions import ( |
|
37 | from pyramid.httpexceptions import ( | |
38 | HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError) |
|
38 | HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError) | |
39 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
39 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
40 |
|
40 | |||
41 | import rhodecode |
|
41 | import rhodecode | |
42 | from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin |
|
42 | from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin | |
43 | from rhodecode.lib import rc_cache |
|
43 | from rhodecode.lib import rc_cache | |
44 | from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware |
|
44 | from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware | |
45 | from rhodecode.lib.base import ( |
|
45 | from rhodecode.lib.base import ( | |
46 | BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context) |
|
46 | BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context) | |
47 | from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError) |
|
47 | from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError) | |
48 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon |
|
48 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon | |
49 | from rhodecode.lib.middleware import appenlight |
|
49 | from rhodecode.lib.middleware import appenlight | |
50 | from rhodecode.lib.middleware.utils import scm_app_http |
|
50 | from rhodecode.lib.middleware.utils import scm_app_http | |
51 | from rhodecode.lib.utils import is_valid_repo, SLUG_RE |
|
51 | from rhodecode.lib.utils import is_valid_repo, SLUG_RE | |
52 | from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode |
|
52 | from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode | |
53 | from rhodecode.lib.vcs.conf import settings as vcs_settings |
|
53 | from rhodecode.lib.vcs.conf import settings as vcs_settings | |
54 | from rhodecode.lib.vcs.backends import base |
|
54 | from rhodecode.lib.vcs.backends import base | |
55 |
|
55 | |||
56 | from rhodecode.model import meta |
|
56 | from rhodecode.model import meta | |
57 | from rhodecode.model.db import User, Repository, PullRequest |
|
57 | from rhodecode.model.db import User, Repository, PullRequest | |
58 | from rhodecode.model.scm import ScmModel |
|
58 | from rhodecode.model.scm import ScmModel | |
59 | from rhodecode.model.pull_request import PullRequestModel |
|
59 | from rhodecode.model.pull_request import PullRequestModel | |
60 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel |
|
60 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel | |
61 |
|
61 | |||
62 | log = logging.getLogger(__name__) |
|
62 | log = logging.getLogger(__name__) | |
63 |
|
63 | |||
64 |
|
64 | |||
65 | def extract_svn_txn_id(acl_repo_name, data): |
|
65 | def extract_svn_txn_id(acl_repo_name, data): | |
66 | """ |
|
66 | """ | |
67 | Helper method for extraction of svn txn_id from submitted XML data during |
|
67 | Helper method for extraction of svn txn_id from submitted XML data during | |
68 | POST operations |
|
68 | POST operations | |
69 | """ |
|
69 | """ | |
70 | try: |
|
70 | try: | |
71 | root = etree.fromstring(data) |
|
71 | root = etree.fromstring(data) | |
72 | pat = re.compile(r'/txn/(?P<txn_id>.*)') |
|
72 | pat = re.compile(r'/txn/(?P<txn_id>.*)') | |
73 | for el in root: |
|
73 | for el in root: | |
74 | if el.tag == '{DAV:}source': |
|
74 | if el.tag == '{DAV:}source': | |
75 | for sub_el in el: |
|
75 | for sub_el in el: | |
76 | if sub_el.tag == '{DAV:}href': |
|
76 | if sub_el.tag == '{DAV:}href': | |
77 | match = pat.search(sub_el.text) |
|
77 | match = pat.search(sub_el.text) | |
78 | if match: |
|
78 | if match: | |
79 | svn_tx_id = match.groupdict()['txn_id'] |
|
79 | svn_tx_id = match.groupdict()['txn_id'] | |
80 | txn_id = rc_cache.utils.compute_key_from_params( |
|
80 | txn_id = rc_cache.utils.compute_key_from_params( | |
81 | acl_repo_name, svn_tx_id) |
|
81 | acl_repo_name, svn_tx_id) | |
82 | return txn_id |
|
82 | return txn_id | |
83 | except Exception: |
|
83 | except Exception: | |
84 | log.exception('Failed to extract txn_id') |
|
84 | log.exception('Failed to extract txn_id') | |
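
A simplified, self-contained example of the kind of DAV MERGE body this helper walks; the exact payload an SVN client sends is an assumption here, and the cache-key computation step is left out:

    import re
    from lxml import etree

    data = (
        '<D:merge xmlns:D="DAV:">'
        '<D:source><D:href>/repo/!svn/txn/123-abc</D:href></D:source>'
        '</D:merge>'
    )
    root = etree.fromstring(data)
    pat = re.compile(r'/txn/(?P<txn_id>.*)')
    for el in root:
        if el.tag == '{DAV:}source':
            for sub_el in el:
                if sub_el.tag == '{DAV:}href':
                    print(pat.search(sub_el.text).groupdict()['txn_id'])  # -> 123-abc
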
85 |
|
85 | |||
86 |
|
86 | |||
87 | def initialize_generator(factory): |
|
87 | def initialize_generator(factory): | |
88 | """ |
|
88 | """ | |
89 | Initializes the returned generator by draining its first element. |
|
89 | Initializes the returned generator by draining its first element. | |
90 |
|
90 | |||
91 | This can be used to give a generator an initializer, which is the code |
|
91 | This can be used to give a generator an initializer, which is the code | |
92 | up to the first yield statement. This decorator enforces that the first |
|
92 | up to the first yield statement. This decorator enforces that the first | |
93 | produced element has the value ``"__init__"`` to make its special |
|
93 | produced element has the value ``"__init__"`` to make its special | |
94 | purpose very explicit in the using code. |
|
94 | purpose very explicit in the using code. | |
95 | """ |
|
95 | """ | |
96 |
|
96 | |||
97 | @wraps(factory) |
|
97 | @wraps(factory) | |
98 | def wrapper(*args, **kwargs): |
|
98 | def wrapper(*args, **kwargs): | |
99 | gen = factory(*args, **kwargs) |
|
99 | gen = factory(*args, **kwargs) | |
100 | try: |
|
100 | try: | |
101 | init = gen.next() |
|
101 | init = gen.next() | |
102 | except StopIteration: |
|
102 | except StopIteration: | |
103 | raise ValueError('Generator must yield at least one element.') |
|
103 | raise ValueError('Generator must yield at least one element.') | |
104 | if init != "__init__": |
|
104 | if init != "__init__": | |
105 | raise ValueError('First yielded element must be "__init__".') |
|
105 | raise ValueError('First yielded element must be "__init__".') | |
106 | return gen |
|
106 | return gen | |
107 | return wrapper |
|
107 | return wrapper | |
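
A standalone usage sketch (following the module's Python 2 idiom, since the decorator above calls `gen.next()`): the decorated factory runs its setup code eagerly, because the wrapper drains the mandatory "__init__" element up front.

    @initialize_generator
    def numbers(limit):
        print('setup runs immediately')   # everything before the first yield
        yield "__init__"
        for i in range(limit):
            yield i

    gen = numbers(3)      # prints 'setup runs immediately' right away
    print(list(gen))      # -> [0, 1, 2]
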
108 |
|
108 | |||
109 |
|
109 | |||
110 | class SimpleVCS(object): |
|
110 | class SimpleVCS(object): | |
111 | """Common functionality for SCM HTTP handlers.""" |
|
111 | """Common functionality for SCM HTTP handlers.""" | |
112 |
|
112 | |||
113 | SCM = 'unknown' |
|
113 | SCM = 'unknown' | |
114 |
|
114 | |||
115 | acl_repo_name = None |
|
115 | acl_repo_name = None | |
116 | url_repo_name = None |
|
116 | url_repo_name = None | |
117 | vcs_repo_name = None |
|
117 | vcs_repo_name = None | |
118 | rc_extras = {} |
|
118 | rc_extras = {} | |
119 |
|
119 | |||
120 | # We have to handle requests to shadow repositories differently from requests |
|
120 | # We have to handle requests to shadow repositories differently from requests | |
121 | # to normal repositories. Therefore we have to distinguish them. To do this |
|
121 | # to normal repositories. Therefore we have to distinguish them. To do this | |
122 | # we use this regex which will match only on URLs pointing to shadow |
|
122 | # we use this regex which will match only on URLs pointing to shadow | |
123 | # repositories. |
|
123 | # repositories. | |
124 | shadow_repo_re = re.compile( |
|
124 | shadow_repo_re = re.compile( | |
125 | '(?P<groups>(?:{slug_pat}/)*)' # repo groups |
|
125 | '(?P<groups>(?:{slug_pat}/)*)' # repo groups | |
126 | '(?P<target>{slug_pat})/' # target repo |
|
126 | '(?P<target>{slug_pat})/' # target repo | |
127 | 'pull-request/(?P<pr_id>\d+)/' # pull request |
|
127 | 'pull-request/(?P<pr_id>\d+)/' # pull request | |
128 | 'repository$' # shadow repo |
|
128 | 'repository$' # shadow repo | |
129 | .format(slug_pat=SLUG_RE.pattern)) |
|
129 | .format(slug_pat=SLUG_RE.pattern)) | |
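
To make the regex concrete, here it is with a simplified slug pattern substituted for SLUG_RE (an assumption made purely for illustration; the real pattern lives in rhodecode.lib.utils):

    import re

    slug_pat = r'[\w\.-]+'   # simplified stand-in for SLUG_RE.pattern
    demo_re = re.compile(
        '(?P<groups>(?:{slug_pat}/)*)'       # repo groups
        '(?P<target>{slug_pat})/'            # target repo
        r'pull-request/(?P<pr_id>\d+)/'      # pull request
        'repository$'                        # shadow repo
        .format(slug_pat=slug_pat))

    match = demo_re.match('RepoGroup/MyRepo/pull-request/3/repository')
    print(match.groupdict())
    # -> {'groups': 'RepoGroup/', 'target': 'MyRepo', 'pr_id': '3'} (key order may vary)
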
130 |
|
130 | |||
131 | def __init__(self, config, registry): |
|
131 | def __init__(self, config, registry): | |
132 | self.registry = registry |
|
132 | self.registry = registry | |
133 | self.config = config |
|
133 | self.config = config | |
134 | # re-populated by specialized middleware |
|
134 | # re-populated by specialized middleware | |
135 | self.repo_vcs_config = base.Config() |
|
135 | self.repo_vcs_config = base.Config() | |
136 | self.rhodecode_settings = SettingsModel().get_all_settings(cache=True) |
|
|||
137 |
|
136 | |||
138 | registry.rhodecode_settings = self.rhodecode_settings |
|
137 | rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False) | |
|
138 | realm = rc_settings.get('rhodecode_realm') or 'RhodeCode AUTH' | |||
|
139 | ||||
139 | # authenticate this VCS request using authfunc |
|
140 | # authenticate this VCS request using authfunc | |
140 | auth_ret_code_detection = \ |
|
141 | auth_ret_code_detection = \ | |
141 | str2bool(self.config.get('auth_ret_code_detection', False)) |
|
142 | str2bool(self.config.get('auth_ret_code_detection', False)) | |
142 | self.authenticate = BasicAuth( |
|
143 | self.authenticate = BasicAuth( | |
143 | '', authenticate, registry, config.get('auth_ret_code'), |
|
144 | '', authenticate, registry, config.get('auth_ret_code'), | |
144 | auth_ret_code_detection) |
|
145 | auth_ret_code_detection, rc_realm=realm) | |
145 | self.ip_addr = '0.0.0.0' |
|
146 | self.ip_addr = '0.0.0.0' | |
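
The realm wiring added above boils down to a small fallback before it is handed to BasicAuth as rc_realm; a stub dict stands in for `SettingsModel().get_all_settings()` here, so the snippet is illustrative only:

    def pick_realm(rc_settings):
        return rc_settings.get('rhodecode_realm') or 'RhodeCode AUTH'

    print(pick_realm({'rhodecode_realm': ''}))                # -> RhodeCode AUTH
    print(pick_realm({'rhodecode_realm': 'My Company VCS'}))  # -> My Company VCS
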
146 |
|
147 | |||
147 | @LazyProperty |
|
148 | @LazyProperty | |
148 | def global_vcs_config(self): |
|
149 | def global_vcs_config(self): | |
149 | try: |
|
150 | try: | |
150 | return VcsSettingsModel().get_ui_settings_as_config_obj() |
|
151 | return VcsSettingsModel().get_ui_settings_as_config_obj() | |
151 | except Exception: |
|
152 | except Exception: | |
152 | return base.Config() |
|
153 | return base.Config() | |
153 |
|
154 | |||
154 | @property |
|
155 | @property | |
155 | def base_path(self): |
|
156 | def base_path(self): | |
156 | settings_path = self.repo_vcs_config.get(*VcsSettingsModel.PATH_SETTING) |
|
157 | settings_path = self.repo_vcs_config.get(*VcsSettingsModel.PATH_SETTING) | |
157 |
|
158 | |||
158 | if not settings_path: |
|
159 | if not settings_path: | |
159 | settings_path = self.global_vcs_config.get(*VcsSettingsModel.PATH_SETTING) |
|
160 | settings_path = self.global_vcs_config.get(*VcsSettingsModel.PATH_SETTING) | |
160 |
|
161 | |||
161 | if not settings_path: |
|
162 | if not settings_path: | |
162 | # try, maybe we passed in explicitly as config option |
|
163 | # try, maybe we passed in explicitly as config option | |
163 | settings_path = self.config.get('base_path') |
|
164 | settings_path = self.config.get('base_path') | |
164 |
|
165 | |||
165 | if not settings_path: |
|
166 | if not settings_path: | |
166 | raise ValueError('FATAL: base_path is empty') |
|
167 | raise ValueError('FATAL: base_path is empty') | |
167 | return settings_path |
|
168 | return settings_path | |
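
The property above is a three-step fallback: repo-level VCS config, then the global VCS config, then the plain application config. A condensed sketch with callables standing in for the real config objects:

    def resolve_base_path(repo_lookup, global_lookup, app_config):
        settings_path = (repo_lookup() or global_lookup()
                         or app_config.get('base_path'))
        if not settings_path:
            raise ValueError('FATAL: base_path is empty')
        return settings_path

    print(resolve_base_path(lambda: None, lambda: None,
                            {'base_path': '/srv/repos'}))   # -> /srv/repos
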
168 |
|
169 | |||
169 | def set_repo_names(self, environ): |
|
170 | def set_repo_names(self, environ): | |
170 | """ |
|
171 | """ | |
171 | This will populate the attributes acl_repo_name, url_repo_name, |
|
172 | This will populate the attributes acl_repo_name, url_repo_name, | |
172 | vcs_repo_name and is_shadow_repo. In case of requests to normal (non |
|
173 | vcs_repo_name and is_shadow_repo. In case of requests to normal (non | |
173 | shadow) repositories all names are equal. In case of requests to a |
|
174 | shadow) repositories all names are equal. In case of requests to a | |
174 | shadow repository the acl-name points to the target repo of the pull |
|
175 | shadow repository the acl-name points to the target repo of the pull | |
175 | request and the vcs-name points to the shadow repo file system path. |
|
176 | request and the vcs-name points to the shadow repo file system path. | |
176 | The url-name is always the URL used by the vcs client program. |
|
177 | The url-name is always the URL used by the vcs client program. | |
177 |
|
178 | |||
178 | Example in case of a shadow repo: |
|
179 | Example in case of a shadow repo: | |
179 | acl_repo_name = RepoGroup/MyRepo |
|
180 | acl_repo_name = RepoGroup/MyRepo | |
180 | url_repo_name = RepoGroup/MyRepo/pull-request/3/repository |
|
181 | url_repo_name = RepoGroup/MyRepo/pull-request/3/repository | |
181 | vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3' |
|
182 | vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3' | |
182 | """ |
|
183 | """ | |
183 | # First we set the repo name from URL for all attributes. This is the |
|
184 | # First we set the repo name from URL for all attributes. This is the | |
184 | # default if handling normal (non shadow) repo requests. |
|
185 | # default if handling normal (non shadow) repo requests. | |
185 | self.url_repo_name = self._get_repository_name(environ) |
|
186 | self.url_repo_name = self._get_repository_name(environ) | |
186 | self.acl_repo_name = self.vcs_repo_name = self.url_repo_name |
|
187 | self.acl_repo_name = self.vcs_repo_name = self.url_repo_name | |
187 | self.is_shadow_repo = False |
|
188 | self.is_shadow_repo = False | |
188 |
|
189 | |||
189 | # Check if this is a request to a shadow repository. |
|
190 | # Check if this is a request to a shadow repository. | |
190 | match = self.shadow_repo_re.match(self.url_repo_name) |
|
191 | match = self.shadow_repo_re.match(self.url_repo_name) | |
191 | if match: |
|
192 | if match: | |
192 | match_dict = match.groupdict() |
|
193 | match_dict = match.groupdict() | |
193 |
|
194 | |||
194 | # Build acl repo name from regex match. |
|
195 | # Build acl repo name from regex match. | |
195 | acl_repo_name = safe_unicode('{groups}{target}'.format( |
|
196 | acl_repo_name = safe_unicode('{groups}{target}'.format( | |
196 | groups=match_dict['groups'] or '', |
|
197 | groups=match_dict['groups'] or '', | |
197 | target=match_dict['target'])) |
|
198 | target=match_dict['target'])) | |
198 |
|
199 | |||
199 | # Retrieve pull request instance by ID from regex match. |
|
200 | # Retrieve pull request instance by ID from regex match. | |
200 | pull_request = PullRequest.get(match_dict['pr_id']) |
|
201 | pull_request = PullRequest.get(match_dict['pr_id']) | |
201 |
|
202 | |||
202 | # Only proceed if we got a pull request and if acl repo name from |
|
203 | # Only proceed if we got a pull request and if acl repo name from | |
203 | # URL equals the target repo name of the pull request. |
|
204 | # URL equals the target repo name of the pull request. | |
204 | if pull_request and (acl_repo_name == pull_request.target_repo.repo_name): |
|
205 | if pull_request and (acl_repo_name == pull_request.target_repo.repo_name): | |
205 |
|
206 | |||
206 | # Get file system path to shadow repository. |
|
207 | # Get file system path to shadow repository. | |
207 | workspace_id = PullRequestModel()._workspace_id(pull_request) |
|
208 | workspace_id = PullRequestModel()._workspace_id(pull_request) | |
208 | vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id) |
|
209 | vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id) | |
209 |
|
210 | |||
210 | # Store names for later usage. |
|
211 | # Store names for later usage. | |
211 | self.vcs_repo_name = vcs_repo_name |
|
212 | self.vcs_repo_name = vcs_repo_name | |
212 | self.acl_repo_name = acl_repo_name |
|
213 | self.acl_repo_name = acl_repo_name | |
213 | self.is_shadow_repo = True |
|
214 | self.is_shadow_repo = True | |
214 |
|
215 | |||
215 | log.debug('Setting all VCS repository names: %s', { |
|
216 | log.debug('Setting all VCS repository names: %s', { | |
216 | 'acl_repo_name': self.acl_repo_name, |
|
217 | 'acl_repo_name': self.acl_repo_name, | |
217 | 'url_repo_name': self.url_repo_name, |
|
218 | 'url_repo_name': self.url_repo_name, | |
218 | 'vcs_repo_name': self.vcs_repo_name, |
|
219 | 'vcs_repo_name': self.vcs_repo_name, | |
219 | }) |
|
220 | }) | |
220 |
|
221 | |||
221 | @property |
|
222 | @property | |
222 | def scm_app(self): |
|
223 | def scm_app(self): | |
223 | custom_implementation = self.config['vcs.scm_app_implementation'] |
|
224 | custom_implementation = self.config['vcs.scm_app_implementation'] | |
224 | if custom_implementation == 'http': |
|
225 | if custom_implementation == 'http': | |
225 | log.debug('Using HTTP implementation of scm app.') |
|
226 | log.debug('Using HTTP implementation of scm app.') | |
226 | scm_app_impl = scm_app_http |
|
227 | scm_app_impl = scm_app_http | |
227 | else: |
|
228 | else: | |
228 | log.debug('Using custom implementation of scm_app: "{}"'.format( |
|
229 | log.debug('Using custom implementation of scm_app: "{}"'.format( | |
229 | custom_implementation)) |
|
230 | custom_implementation)) | |
230 | scm_app_impl = importlib.import_module(custom_implementation) |
|
231 | scm_app_impl = importlib.import_module(custom_implementation) | |
231 | return scm_app_impl |
|
232 | return scm_app_impl | |
232 |
|
233 | |||
233 | def _get_by_id(self, repo_name): |
|
234 | def _get_by_id(self, repo_name): | |
234 | """ |
|
235 | """ | |
235 | Gets a special pattern _<ID> from the clone URL and tries to replace it |
|
236 | Gets a special pattern _<ID> from the clone URL and tries to replace it | |
236 | with a repository name, to support permanent _<ID> URLs that do not change |
|
237 | with a repository name, to support permanent _<ID> URLs that do not change | |
237 | """ |
|
238 | """ | |
238 |
|
239 | |||
239 | data = repo_name.split('/') |
|
240 | data = repo_name.split('/') | |
240 | if len(data) >= 2: |
|
241 | if len(data) >= 2: | |
241 | from rhodecode.model.repo import RepoModel |
|
242 | from rhodecode.model.repo import RepoModel | |
242 | by_id_match = RepoModel().get_repo_by_id(repo_name) |
|
243 | by_id_match = RepoModel().get_repo_by_id(repo_name) | |
243 | if by_id_match: |
|
244 | if by_id_match: | |
244 | data[1] = by_id_match.repo_name |
|
245 | data[1] = by_id_match.repo_name | |
245 |
|
246 | |||
246 | return safe_str('/'.join(data)) |
|
247 | return safe_str('/'.join(data)) | |
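
A hedged sketch of the _<ID> rewrite: the database lookup is stubbed with a dict, and the shape of the incoming path (the ID token sitting in the second path segment) is an assumption made only for this illustration:

    def get_by_id(repo_name, id_to_name):
        data = repo_name.split('/')
        if len(data) >= 2:
            by_id_match = id_to_name.get(data[1])   # stand-in for RepoModel().get_repo_by_id()
            if by_id_match:
                data[1] = by_id_match
        return '/'.join(data)

    print(get_by_id('/_7', {'_7': 'RepoGroup/MyRepo'}))   # -> /RepoGroup/MyRepo
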
247 |
|
248 | |||
248 | def _invalidate_cache(self, repo_name): |
|
249 | def _invalidate_cache(self, repo_name): | |
249 | """ |
|
250 | """ | |
250 | Marks the cache for this repository for invalidation on next access |
|
251 | Marks the cache for this repository for invalidation on next access | |
251 |
|
252 | |||
252 | :param repo_name: full repo name, also a cache key |
|
253 | :param repo_name: full repo name, also a cache key | |
253 | """ |
|
254 | """ | |
254 | ScmModel().mark_for_invalidation(repo_name) |
|
255 | ScmModel().mark_for_invalidation(repo_name) | |
255 |
|
256 | |||
256 | def is_valid_and_existing_repo(self, repo_name, base_path, scm_type): |
|
257 | def is_valid_and_existing_repo(self, repo_name, base_path, scm_type): | |
257 | db_repo = Repository.get_by_repo_name(repo_name) |
|
258 | db_repo = Repository.get_by_repo_name(repo_name) | |
258 | if not db_repo: |
|
259 | if not db_repo: | |
259 | log.debug('Repository `%s` not found inside the database.', |
|
260 | log.debug('Repository `%s` not found inside the database.', | |
260 | repo_name) |
|
261 | repo_name) | |
261 | return False |
|
262 | return False | |
262 |
|
263 | |||
263 | if db_repo.repo_type != scm_type: |
|
264 | if db_repo.repo_type != scm_type: | |
264 | log.warning( |
|
265 | log.warning( | |
265 | 'Repository `%s` has incorrect scm_type, expected %s got %s', |
|
266 | 'Repository `%s` has incorrect scm_type, expected %s got %s', | |
266 | repo_name, db_repo.repo_type, scm_type) |
|
267 | repo_name, db_repo.repo_type, scm_type) | |
267 | return False |
|
268 | return False | |
268 |
|
269 | |||
269 | config = db_repo._config |
|
270 | config = db_repo._config | |
270 | config.set('extensions', 'largefiles', '') |
|
271 | config.set('extensions', 'largefiles', '') | |
271 | return is_valid_repo( |
|
272 | return is_valid_repo( | |
272 | repo_name, base_path, |
|
273 | repo_name, base_path, | |
273 | explicit_scm=scm_type, expect_scm=scm_type, config=config) |
|
274 | explicit_scm=scm_type, expect_scm=scm_type, config=config) | |
274 |
|
275 | |||
275 | def valid_and_active_user(self, user): |
|
276 | def valid_and_active_user(self, user): | |
276 | """ |
|
277 | """ | |
277 | Checks that the user is not empty and, if it is an actual user object, |
|
278 | Checks that the user is not empty and, if it is an actual user object, | |
278 | checks that it is active. |
|
279 | checks that it is active. | |
279 |
|
280 | |||
280 | :param user: user object or None |
|
281 | :param user: user object or None | |
281 | :return: boolean |
|
282 | :return: boolean | |
282 | """ |
|
283 | """ | |
283 | if user is None: |
|
284 | if user is None: | |
284 | return False |
|
285 | return False | |
285 |
|
286 | |||
286 | elif user.active: |
|
287 | elif user.active: | |
287 | return True |
|
288 | return True | |
288 |
|
289 | |||
289 | return False |
|
290 | return False | |
290 |
|
291 | |||
291 | @property |
|
292 | @property | |
292 | def is_shadow_repo_dir(self): |
|
293 | def is_shadow_repo_dir(self): | |
293 | return os.path.isdir(self.vcs_repo_name) |
|
294 | return os.path.isdir(self.vcs_repo_name) | |
294 |
|
295 | |||
295 | def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None, |
|
296 | def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None, | |
296 | plugin_id='', plugin_cache_active=False, cache_ttl=0): |
|
297 | plugin_id='', plugin_cache_active=False, cache_ttl=0): | |
297 | """ |
|
298 | """ | |
298 | Checks permissions using the action (push/pull), user and repository |
|
299 | Checks permissions using the action (push/pull), user and repository | |
299 | name. If plugin caching and a TTL are set, it will use the plugin which |
|
300 | name. If plugin caching and a TTL are set, it will use the plugin which | |
300 | authenticated the user to store the cached permission result for |
|
301 | authenticated the user to store the cached permission result for | |
301 | cache_ttl seconds |
|
302 | cache_ttl seconds | |
302 |
|
303 | |||
303 | :param action: push or pull action |
|
304 | :param action: push or pull action | |
304 | :param user: user instance |
|
305 | :param user: user instance | |
305 | :param repo_name: repository name |
|
306 | :param repo_name: repository name | |
306 | """ |
|
307 | """ | |
307 |
|
308 | |||
308 | log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)', |
|
309 | log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)', | |
309 | plugin_id, plugin_cache_active, cache_ttl) |
|
310 | plugin_id, plugin_cache_active, cache_ttl) | |
310 |
|
311 | |||
311 | user_id = user.user_id |
|
312 | user_id = user.user_id | |
312 | cache_namespace_uid = 'cache_user_auth.{}'.format(user_id) |
|
313 | cache_namespace_uid = 'cache_user_auth.{}'.format(user_id) | |
313 | region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid) |
|
314 | region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid) | |
314 |
|
315 | |||
315 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, |
|
316 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, | |
316 | expiration_time=cache_ttl, |
|
317 | expiration_time=cache_ttl, | |
317 | condition=plugin_cache_active) |
|
318 | condition=plugin_cache_active) | |
318 | def compute_perm_vcs( |
|
319 | def compute_perm_vcs( | |
319 | cache_name, plugin_id, action, user_id, repo_name, ip_addr): |
|
320 | cache_name, plugin_id, action, user_id, repo_name, ip_addr): | |
320 |
|
321 | |||
321 | log.debug('auth: calculating permission access now...') |
|
322 | log.debug('auth: calculating permission access now...') | |
322 | # check IP |
|
323 | # check IP | |
323 | inherit = user.inherit_default_permissions |
|
324 | inherit = user.inherit_default_permissions | |
324 | ip_allowed = AuthUser.check_ip_allowed( |
|
325 | ip_allowed = AuthUser.check_ip_allowed( | |
325 | user_id, ip_addr, inherit_from_default=inherit) |
|
326 | user_id, ip_addr, inherit_from_default=inherit) | |
326 | if ip_allowed: |
|
327 | if ip_allowed: | |
327 | log.info('Access for IP:%s allowed', ip_addr) |
|
328 | log.info('Access for IP:%s allowed', ip_addr) | |
328 | else: |
|
329 | else: | |
329 | return False |
|
330 | return False | |
330 |
|
331 | |||
331 | if action == 'push': |
|
332 | if action == 'push': | |
332 | perms = ('repository.write', 'repository.admin') |
|
333 | perms = ('repository.write', 'repository.admin') | |
333 | if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name): |
|
334 | if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name): | |
334 | return False |
|
335 | return False | |
335 |
|
336 | |||
336 | else: |
|
337 | else: | |
337 | # any other action need at least read permission |
|
338 | # any other action need at least read permission | |
338 | perms = ( |
|
339 | perms = ( | |
339 | 'repository.read', 'repository.write', 'repository.admin') |
|
340 | 'repository.read', 'repository.write', 'repository.admin') | |
340 | if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name): |
|
341 | if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name): | |
341 | return False |
|
342 | return False | |
342 |
|
343 | |||
343 | return True |
|
344 | return True | |
344 |
|
345 | |||
345 | start = time.time() |
|
346 | start = time.time() | |
346 | log.debug('Running plugin `%s` permissions check', plugin_id) |
|
347 | log.debug('Running plugin `%s` permissions check', plugin_id) | |
347 |
|
348 | |||
348 | # for environ based auth, password can be empty, but then the validation is |
|
349 | # for environ based auth, password can be empty, but then the validation is | |
349 | # on the server that fills in the env data needed for authentication |
|
350 | # on the server that fills in the env data needed for authentication | |
350 | perm_result = compute_perm_vcs( |
|
351 | perm_result = compute_perm_vcs( | |
351 | 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr) |
|
352 | 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr) | |
352 |
|
353 | |||
353 | auth_time = time.time() - start |
|
354 | auth_time = time.time() - start | |
354 | log.debug('Permissions for plugin `%s` completed in %.4fs, ' |
|
355 | log.debug('Permissions for plugin `%s` completed in %.4fs, ' | |
355 | 'expiration time of fetched cache %.1fs.', |
|
356 | 'expiration time of fetched cache %.1fs.', | |
356 | plugin_id, auth_time, cache_ttl) |
|
357 | plugin_id, auth_time, cache_ttl) | |
357 |
|
358 | |||
358 | return perm_result |
|
359 | return perm_result | |
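
A simplified stand-in for the conditional TTL cache used above (the real code relies on a dogpile.cache-backed region and `conditional_cache_on_arguments`): results are memoized per plugin/action/user/repo/IP, but only when the authenticating plugin switched caching on.

    import time

    _perm_cache = {}

    def cached_perm_check(compute, key, cache_active, cache_ttl):
        if cache_active:
            hit = _perm_cache.get(key)
            if hit and (time.time() - hit[0]) < cache_ttl:
                return hit[1]              # fresh cached result, skip recomputing
        result = compute()
        if cache_active:
            _perm_cache[key] = (time.time(), result)
        return result

    # key mirrors the arguments of compute_perm_vcs above
    key = ('vcs_permissions', 'egg:rhodecode-enterprise-ce#rhodecode',
           'push', 2, 'RepoGroup/MyRepo', '10.0.0.1')
    print(cached_perm_check(lambda: True, key, cache_active=True, cache_ttl=300))  # -> True
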
359 |
|
360 | |||
360 | def _get_http_scheme(self, environ): |
|
361 | def _get_http_scheme(self, environ): | |
361 | try: |
|
362 | try: | |
362 | return environ['wsgi.url_scheme'] |
|
363 | return environ['wsgi.url_scheme'] | |
363 | except Exception: |
|
364 | except Exception: | |
364 | log.exception('Failed to read http scheme') |
|
365 | log.exception('Failed to read http scheme') | |
365 | return 'http' |
|
366 | return 'http' | |
366 |
|
367 | |||
367 | def _check_ssl(self, environ, start_response): |
|
368 | def _check_ssl(self, environ, start_response): | |
368 | """ |
|
369 | """ | |
369 | Checks the SSL-required flag and returns False if SSL is required |
|
370 | Checks the SSL-required flag and returns False if SSL is required | |
370 | but not present, True otherwise |
|
371 | but not present, True otherwise | |
371 | """ |
|
372 | """ | |
372 | org_proto = environ['wsgi._org_proto'] |
|
373 | org_proto = environ['wsgi._org_proto'] | |
373 | # check if we have SSL required ! if not it's a bad request ! |
|
374 | # check if we have SSL required ! if not it's a bad request ! | |
374 | require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl')) |
|
375 | require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl')) | |
375 | if require_ssl and org_proto == 'http': |
|
376 | if require_ssl and org_proto == 'http': | |
376 | log.debug( |
|
377 | log.debug( | |
377 | 'Bad request: detected protocol is `%s` and ' |
|
378 | 'Bad request: detected protocol is `%s` and ' | |
378 | 'SSL/HTTPS is required.', org_proto) |
|
379 | 'SSL/HTTPS is required.', org_proto) | |
379 | return False |
|
380 | return False | |
380 | return True |
|
381 | return True | |
381 |
|
382 | |||
382 | def _get_default_cache_ttl(self): |
|
383 | def _get_default_cache_ttl(self): | |
383 | # take AUTH_CACHE_TTL from the `rhodecode` auth plugin |
|
384 | # take AUTH_CACHE_TTL from the `rhodecode` auth plugin | |
384 | plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode') |
|
385 | plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode') | |
385 | plugin_settings = plugin.get_settings() |
|
386 | plugin_settings = plugin.get_settings() | |
386 | plugin_cache_active, cache_ttl = plugin.get_ttl_cache( |
|
387 | plugin_cache_active, cache_ttl = plugin.get_ttl_cache( | |
387 | plugin_settings) or (False, 0) |
|
388 | plugin_settings) or (False, 0) | |
388 | return plugin_cache_active, cache_ttl |
|
389 | return plugin_cache_active, cache_ttl | |
389 |
|
390 | |||
390 | def __call__(self, environ, start_response): |
|
391 | def __call__(self, environ, start_response): | |
391 | try: |
|
392 | try: | |
392 | return self._handle_request(environ, start_response) |
|
393 | return self._handle_request(environ, start_response) | |
393 | except Exception: |
|
394 | except Exception: | |
394 | log.exception("Exception while handling request") |
|
395 | log.exception("Exception while handling request") | |
395 | appenlight.track_exception(environ) |
|
396 | appenlight.track_exception(environ) | |
396 | return HTTPInternalServerError()(environ, start_response) |
|
397 | return HTTPInternalServerError()(environ, start_response) | |
397 | finally: |
|
398 | finally: | |
398 | meta.Session.remove() |
|
399 | meta.Session.remove() | |
399 |
|
400 | |||
400 | def _handle_request(self, environ, start_response): |
|
401 | def _handle_request(self, environ, start_response): | |
401 | if not self._check_ssl(environ, start_response): |
|
402 | if not self._check_ssl(environ, start_response): | |
402 | reason = ('SSL required, while RhodeCode was unable ' |
|
403 | reason = ('SSL required, while RhodeCode was unable ' | |
403 | 'to detect this as SSL request') |
|
404 | 'to detect this as SSL request') | |
404 | log.debug('User not allowed to proceed, %s', reason) |
|
405 | log.debug('User not allowed to proceed, %s', reason) | |
405 | return HTTPNotAcceptable(reason)(environ, start_response) |
|
406 | return HTTPNotAcceptable(reason)(environ, start_response) | |
406 |
|
407 | |||
407 | if not self.url_repo_name: |
|
408 | if not self.url_repo_name: | |
408 | log.warning('Repository name is empty: %s', self.url_repo_name) |
|
409 | log.warning('Repository name is empty: %s', self.url_repo_name) | |
409 | # failed to get repo name, we fail now |
|
410 | # failed to get repo name, we fail now | |
410 | return HTTPNotFound()(environ, start_response) |
|
411 | return HTTPNotFound()(environ, start_response) | |
411 | log.debug('Extracted repo name is %s', self.url_repo_name) |
|
412 | log.debug('Extracted repo name is %s', self.url_repo_name) | |
412 |
|
413 | |||
413 | ip_addr = get_ip_addr(environ) |
|
414 | ip_addr = get_ip_addr(environ) | |
414 | user_agent = get_user_agent(environ) |
|
415 | user_agent = get_user_agent(environ) | |
415 | username = None |
|
416 | username = None | |
416 |
|
417 | |||
417 | # skip passing error to error controller |
|
418 | # skip passing error to error controller | |
418 | environ['pylons.status_code_redirect'] = True |
|
419 | environ['pylons.status_code_redirect'] = True | |
419 |
|
420 | |||
420 | # ====================================================================== |
|
421 | # ====================================================================== | |
421 | # GET ACTION PULL or PUSH |
|
422 | # GET ACTION PULL or PUSH | |
422 | # ====================================================================== |
|
423 | # ====================================================================== | |
423 | action = self._get_action(environ) |
|
424 | action = self._get_action(environ) | |
424 |
|
425 | |||
425 | # ====================================================================== |
|
426 | # ====================================================================== | |
426 | # Check if this is a request to a shadow repository of a pull request. |
|
427 | # Check if this is a request to a shadow repository of a pull request. | |
427 | # In this case only pull action is allowed. |
|
428 | # In this case only pull action is allowed. | |
428 | # ====================================================================== |
|
429 | # ====================================================================== | |
429 | if self.is_shadow_repo and action != 'pull': |
|
430 | if self.is_shadow_repo and action != 'pull': | |
430 | reason = 'Only pull action is allowed for shadow repositories.' |
|
431 | reason = 'Only pull action is allowed for shadow repositories.' | |
431 | log.debug('User not allowed to proceed, %s', reason) |
|
432 | log.debug('User not allowed to proceed, %s', reason) | |
432 | return HTTPNotAcceptable(reason)(environ, start_response) |
|
433 | return HTTPNotAcceptable(reason)(environ, start_response) | |
433 |
|
434 | |||
434 | # Check if the shadow repo actually exists, in case someone refers |
|
435 | # Check if the shadow repo actually exists, in case someone refers | |
435 | # to it, and it has been deleted because of successful merge. |
|
436 | # to it, and it has been deleted because of successful merge. | |
436 | if self.is_shadow_repo and not self.is_shadow_repo_dir: |
|
437 | if self.is_shadow_repo and not self.is_shadow_repo_dir: | |
437 | log.debug( |
|
438 | log.debug( | |
438 | 'Shadow repo detected, and shadow repo dir `%s` is missing', |
|
439 | 'Shadow repo detected, and shadow repo dir `%s` is missing', | |
439 | self.is_shadow_repo_dir) |
|
440 | self.is_shadow_repo_dir) | |
440 | return HTTPNotFound()(environ, start_response) |
|
441 | return HTTPNotFound()(environ, start_response) | |
441 |
|
442 | |||
442 | # ====================================================================== |
|
443 | # ====================================================================== | |
443 | # CHECK ANONYMOUS PERMISSION |
|
444 | # CHECK ANONYMOUS PERMISSION | |
444 | # ====================================================================== |
|
445 | # ====================================================================== | |
445 | detect_force_push = False |
|
446 | detect_force_push = False | |
446 | check_branch_perms = False |
|
447 | check_branch_perms = False | |
447 | if action in ['pull', 'push']: |
|
448 | if action in ['pull', 'push']: | |
448 | user_obj = anonymous_user = User.get_default_user() |
|
449 | user_obj = anonymous_user = User.get_default_user() | |
449 | auth_user = user_obj.AuthUser() |
|
450 | auth_user = user_obj.AuthUser() | |
450 | username = anonymous_user.username |
|
451 | username = anonymous_user.username | |
451 | if anonymous_user.active: |
|
452 | if anonymous_user.active: | |
452 | plugin_cache_active, cache_ttl = self._get_default_cache_ttl() |
|
453 | plugin_cache_active, cache_ttl = self._get_default_cache_ttl() | |
453 | # ONLY check permissions if the user is activated |
|
454 | # ONLY check permissions if the user is activated | |
454 | anonymous_perm = self._check_permission( |
|
455 | anonymous_perm = self._check_permission( | |
455 | action, anonymous_user, auth_user, self.acl_repo_name, ip_addr, |
|
456 | action, anonymous_user, auth_user, self.acl_repo_name, ip_addr, | |
456 | plugin_id='anonymous_access', |
|
457 | plugin_id='anonymous_access', | |
457 | plugin_cache_active=plugin_cache_active, |
|
458 | plugin_cache_active=plugin_cache_active, | |
458 | cache_ttl=cache_ttl, |
|
459 | cache_ttl=cache_ttl, | |
459 | ) |
|
460 | ) | |
460 | else: |
|
461 | else: | |
461 | anonymous_perm = False |
|
462 | anonymous_perm = False | |
462 |
|
463 | |||
463 | if not anonymous_user.active or not anonymous_perm: |
|
464 | if not anonymous_user.active or not anonymous_perm: | |
464 | if not anonymous_user.active: |
|
465 | if not anonymous_user.active: | |
465 | log.debug('Anonymous access is disabled, running ' |
|
466 | log.debug('Anonymous access is disabled, running ' | |
466 | 'authentication') |
|
467 | 'authentication') | |
467 |
|
468 | |||
468 | if not anonymous_perm: |
|
469 | if not anonymous_perm: | |
469 | log.debug('Not enough credentials to access this ' |
|
470 | log.debug('Not enough credentials to access this ' | |
470 | 'repository as anonymous user') |
|
471 | 'repository as anonymous user') | |
471 |
|
472 | |||
472 | username = None |
|
473 | username = None | |
473 | # ============================================================== |
|
474 | # ============================================================== | |
474 | # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE |
|
475 | # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE | |
475 | # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS |
|
476 | # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS | |
476 | # ============================================================== |
|
477 | # ============================================================== | |
477 |
|
478 | |||
478 | # try to auth based on environ, container auth methods |
|
479 | # try to auth based on environ, container auth methods | |
479 | log.debug('Running PRE-AUTH for container based authentication') |
|
480 | log.debug('Running PRE-AUTH for container based authentication') | |
480 | pre_auth = authenticate( |
|
481 | pre_auth = authenticate( | |
481 | '', '', environ, VCS_TYPE, registry=self.registry, |
|
482 | '', '', environ, VCS_TYPE, registry=self.registry, | |
482 | acl_repo_name=self.acl_repo_name) |
|
483 | acl_repo_name=self.acl_repo_name) | |
483 | if pre_auth and pre_auth.get('username'): |
|
484 | if pre_auth and pre_auth.get('username'): | |
484 | username = pre_auth['username'] |
|
485 | username = pre_auth['username'] | |
485 | log.debug('PRE-AUTH got %s as username', username) |
|
486 | log.debug('PRE-AUTH got %s as username', username) | |
486 | if pre_auth: |
|
487 | if pre_auth: | |
487 | log.debug('PRE-AUTH successful from %s', |
|
488 | log.debug('PRE-AUTH successful from %s', | |
488 | pre_auth.get('auth_data', {}).get('_plugin')) |
|
489 | pre_auth.get('auth_data', {}).get('_plugin')) | |
489 |
|
490 | |||
490 | # If not authenticated by the container, run basic auth below; but |
|
491 | # If not authenticated by the container, run basic auth below; but | |
491 | # first inject the calling repo_name for special scope checks |
|
492 | # first inject the calling repo_name for special scope checks | |
492 | self.authenticate.acl_repo_name = self.acl_repo_name |
|
493 | self.authenticate.acl_repo_name = self.acl_repo_name | |
493 |
|
494 | |||
494 | plugin_cache_active, cache_ttl = False, 0 |
|
495 | plugin_cache_active, cache_ttl = False, 0 | |
495 | plugin = None |
|
496 | plugin = None | |
496 | if not username: |
|
497 | if not username: | |
497 | self.authenticate.realm = self.authenticate.get_rc_realm() |
|
498 | self.authenticate.realm = self.authenticate.get_rc_realm() | |
498 |
|
499 | |||
499 | try: |
|
500 | try: | |
500 | auth_result = self.authenticate(environ) |
|
501 | auth_result = self.authenticate(environ) | |
501 | except (UserCreationError, NotAllowedToCreateUserError) as e: |
|
502 | except (UserCreationError, NotAllowedToCreateUserError) as e: | |
502 | log.error(e) |
|
503 | log.error(e) | |
503 | reason = safe_str(e) |
|
504 | reason = safe_str(e) | |
504 | return HTTPNotAcceptable(reason)(environ, start_response) |
|
505 | return HTTPNotAcceptable(reason)(environ, start_response) | |
505 |
|
506 | |||
506 | if isinstance(auth_result, dict): |
|
507 | if isinstance(auth_result, dict): | |
507 | AUTH_TYPE.update(environ, 'basic') |
|
508 | AUTH_TYPE.update(environ, 'basic') | |
508 | REMOTE_USER.update(environ, auth_result['username']) |
|
509 | REMOTE_USER.update(environ, auth_result['username']) | |
509 | username = auth_result['username'] |
|
510 | username = auth_result['username'] | |
510 | plugin = auth_result.get('auth_data', {}).get('_plugin') |
|
511 | plugin = auth_result.get('auth_data', {}).get('_plugin') | |
511 | log.info( |
|
512 | log.info( | |
512 | 'MAIN-AUTH successful for user `%s` from %s plugin', |
|
513 | 'MAIN-AUTH successful for user `%s` from %s plugin', | |
513 | username, plugin) |
|
514 | username, plugin) | |
514 |
|
515 | |||
515 | plugin_cache_active, cache_ttl = auth_result.get( |
|
516 | plugin_cache_active, cache_ttl = auth_result.get( | |
516 | 'auth_data', {}).get('_ttl_cache') or (False, 0) |
|
517 | 'auth_data', {}).get('_ttl_cache') or (False, 0) | |
517 | else: |
|
518 | else: | |
518 | return auth_result.wsgi_application(environ, start_response) |
|
519 | return auth_result.wsgi_application(environ, start_response) | |
519 |
|
520 | |||
520 | # ============================================================== |
|
521 | # ============================================================== | |
521 | # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME |
|
522 | # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME | |
522 | # ============================================================== |
|
523 | # ============================================================== | |
523 | user = User.get_by_username(username) |
|
524 | user = User.get_by_username(username) | |
524 | if not self.valid_and_active_user(user): |
|
525 | if not self.valid_and_active_user(user): | |
525 | return HTTPForbidden()(environ, start_response) |
|
526 | return HTTPForbidden()(environ, start_response) | |
526 | username = user.username |
|
527 | username = user.username | |
527 | user_id = user.user_id |
|
528 | user_id = user.user_id | |
528 |
|
529 | |||
529 | # check user attributes for password change flag |
|
530 | # check user attributes for password change flag | |
530 | user_obj = user |
|
531 | user_obj = user | |
531 | auth_user = user_obj.AuthUser() |
|
532 | auth_user = user_obj.AuthUser() | |
532 | if user_obj and user_obj.username != User.DEFAULT_USER and \ |
|
533 | if user_obj and user_obj.username != User.DEFAULT_USER and \ | |
533 | user_obj.user_data.get('force_password_change'): |
|
534 | user_obj.user_data.get('force_password_change'): | |
534 | reason = 'password change required' |
|
535 | reason = 'password change required' | |
535 | log.debug('User not allowed to authenticate, %s', reason) |
|
536 | log.debug('User not allowed to authenticate, %s', reason) | |
536 | return HTTPNotAcceptable(reason)(environ, start_response) |
|
537 | return HTTPNotAcceptable(reason)(environ, start_response) | |
537 |
|
538 | |||
538 | # check permissions for this repository |
|
539 | # check permissions for this repository | |
539 | perm = self._check_permission( |
|
540 | perm = self._check_permission( | |
540 | action, user, auth_user, self.acl_repo_name, ip_addr, |
|
541 | action, user, auth_user, self.acl_repo_name, ip_addr, | |
541 | plugin, plugin_cache_active, cache_ttl) |
|
542 | plugin, plugin_cache_active, cache_ttl) | |
542 | if not perm: |
|
543 | if not perm: | |
543 | return HTTPForbidden()(environ, start_response) |
|
544 | return HTTPForbidden()(environ, start_response) | |
544 | environ['rc_auth_user_id'] = user_id |
|
545 | environ['rc_auth_user_id'] = user_id | |
545 |
|
546 | |||
546 | if action == 'push': |
|
547 | if action == 'push': | |
547 | perms = auth_user.get_branch_permissions(self.acl_repo_name) |
|
548 | perms = auth_user.get_branch_permissions(self.acl_repo_name) | |
548 | if perms: |
|
549 | if perms: | |
549 | check_branch_perms = True |
|
550 | check_branch_perms = True | |
550 | detect_force_push = True |
|
551 | detect_force_push = True | |
551 |
|
552 | |||
552 | # extras are injected into UI object and later available |
|
553 | # extras are injected into UI object and later available | |
553 | # in hooks executed by RhodeCode |
|
554 | # in hooks executed by RhodeCode | |
554 | check_locking = _should_check_locking(environ.get('QUERY_STRING')) |
|
555 | check_locking = _should_check_locking(environ.get('QUERY_STRING')) | |
555 |
|
556 | |||
556 | extras = vcs_operation_context( |
|
557 | extras = vcs_operation_context( | |
557 | environ, repo_name=self.acl_repo_name, username=username, |
|
558 | environ, repo_name=self.acl_repo_name, username=username, | |
558 | action=action, scm=self.SCM, check_locking=check_locking, |
|
559 | action=action, scm=self.SCM, check_locking=check_locking, | |
559 | is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms, |
|
560 | is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms, | |
560 | detect_force_push=detect_force_push |
|
561 | detect_force_push=detect_force_push | |
561 | ) |
|
562 | ) | |
562 |
|
563 | |||
563 | # ====================================================================== |
|
564 | # ====================================================================== | |
564 | # REQUEST HANDLING |
|
565 | # REQUEST HANDLING | |
565 | # ====================================================================== |
|
566 | # ====================================================================== | |
566 | repo_path = os.path.join( |
|
567 | repo_path = os.path.join( | |
567 | safe_str(self.base_path), safe_str(self.vcs_repo_name)) |
|
568 | safe_str(self.base_path), safe_str(self.vcs_repo_name)) | |
568 | log.debug('Repository path is %s', repo_path) |
|
569 | log.debug('Repository path is %s', repo_path) | |
569 |
|
570 | |||
570 | fix_PATH() |
|
571 | fix_PATH() | |
571 |
|
572 | |||
572 | log.info( |
|
573 | log.info( | |
573 | '%s action on %s repo "%s" by "%s" from %s %s', |
|
574 | '%s action on %s repo "%s" by "%s" from %s %s', | |
574 | action, self.SCM, safe_str(self.url_repo_name), |
|
575 | action, self.SCM, safe_str(self.url_repo_name), | |
575 | safe_str(username), ip_addr, user_agent) |
|
576 | safe_str(username), ip_addr, user_agent) | |
576 |
|
577 | |||
577 | return self._generate_vcs_response( |
|
578 | return self._generate_vcs_response( | |
578 | environ, start_response, repo_path, extras, action) |
|
579 | environ, start_response, repo_path, extras, action) | |
579 |
|
580 | |||
580 | @initialize_generator |
|
581 | @initialize_generator | |
581 | def _generate_vcs_response( |
|
582 | def _generate_vcs_response( | |
582 | self, environ, start_response, repo_path, extras, action): |
|
583 | self, environ, start_response, repo_path, extras, action): | |
583 | """ |
|
584 | """ | |
584 | Returns a generator for the response content. |
|
585 | Returns a generator for the response content. | |
585 |
|
586 | |||
586 | This method is implemented as a generator, so that it can trigger |
|
587 | This method is implemented as a generator, so that it can trigger | |
587 | the cache validation after all content has been sent back to the client. It
|
588 | the cache validation after all content has been sent back to the client. It | |
588 | also handles the locking exceptions which will be triggered when |
|
589 | also handles the locking exceptions which will be triggered when | |
589 | the first chunk is produced by the underlying WSGI application. |
|
590 | the first chunk is produced by the underlying WSGI application. | |
590 | """ |
|
591 | """ | |
591 | txn_id = '' |
|
592 | txn_id = '' | |
592 | if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE': |
|
593 | if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE': | |
593 | # SVN case: we want to re-use the callback daemon port, so we use the
|
594 | # SVN case: we want to re-use the callback daemon port, so we use the | |
594 | # txn_id; to get it we peek at the request body and then restore it
|
595 | # txn_id; to get it we peek at the request body and then restore it | |
595 | # as wsgi.input
|
596 | # as wsgi.input | |
596 | data = environ['wsgi.input'].read() |
|
597 | data = environ['wsgi.input'].read() | |
597 | environ['wsgi.input'] = StringIO(data) |
|
598 | environ['wsgi.input'] = StringIO(data) | |
598 | txn_id = extract_svn_txn_id(self.acl_repo_name, data) |
|
599 | txn_id = extract_svn_txn_id(self.acl_repo_name, data) | |
599 |
|
600 | |||
600 | callback_daemon, extras = self._prepare_callback_daemon( |
|
601 | callback_daemon, extras = self._prepare_callback_daemon( | |
601 | extras, environ, action, txn_id=txn_id) |
|
602 | extras, environ, action, txn_id=txn_id) | |
602 | log.debug('HOOKS extras is %s', extras) |
|
603 | log.debug('HOOKS extras is %s', extras) | |
603 |
|
604 | |||
604 | http_scheme = self._get_http_scheme(environ) |
|
605 | http_scheme = self._get_http_scheme(environ) | |
605 |
|
606 | |||
606 | config = self._create_config(extras, self.acl_repo_name, scheme=http_scheme) |
|
607 | config = self._create_config(extras, self.acl_repo_name, scheme=http_scheme) | |
607 | app = self._create_wsgi_app(repo_path, self.url_repo_name, config) |
|
608 | app = self._create_wsgi_app(repo_path, self.url_repo_name, config) | |
608 | with callback_daemon: |
|
609 | with callback_daemon: | |
609 | app.rc_extras = extras |
|
610 | app.rc_extras = extras | |
610 |
|
611 | |||
611 | try: |
|
612 | try: | |
612 | response = app(environ, start_response) |
|
613 | response = app(environ, start_response) | |
613 | finally: |
|
614 | finally: | |
614 | # This statement works together with the decorator |
|
615 | # This statement works together with the decorator | |
615 | # "initialize_generator" above. The decorator ensures that |
|
616 | # "initialize_generator" above. The decorator ensures that | |
616 | # we hit the first yield statement before the generator is |
|
617 | # we hit the first yield statement before the generator is | |
617 | # returned back to the WSGI server. This is needed to |
|
618 | # returned back to the WSGI server. This is needed to | |
618 | # ensure that the call to "app" above triggers the |
|
619 | # ensure that the call to "app" above triggers the | |
619 | # needed callback to "start_response" before the |
|
620 | # needed callback to "start_response" before the | |
620 | # generator is actually used. |
|
621 | # generator is actually used. | |
621 | yield "__init__" |
|
622 | yield "__init__" | |
622 |
|
623 | |||
623 | # iter content |
|
624 | # iter content | |
624 | for chunk in response: |
|
625 | for chunk in response: | |
625 | yield chunk |
|
626 | yield chunk | |
626 |
|
627 | |||
627 | try: |
|
628 | try: | |
628 | # invalidate cache on push |
|
629 | # invalidate cache on push | |
629 | if action == 'push': |
|
630 | if action == 'push': | |
630 | self._invalidate_cache(self.url_repo_name) |
|
631 | self._invalidate_cache(self.url_repo_name) | |
631 | finally: |
|
632 | finally: | |
632 | meta.Session.remove() |
|
633 | meta.Session.remove() | |
633 |
|
634 | |||
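
# NOTE: the docstring and comments in _generate_vcs_response() above describe
# a generator that must be primed (run up to its first yield) before the WSGI
# server iterates it, so that start_response() has already been called.
# RhodeCode's real ``initialize_generator`` decorator is defined elsewhere and
# may differ; the following is only a minimal sketch of a decorator with that
# contract, assuming the "__init__" sentinel convention used above.
from functools import wraps

def initialize_generator_sketch(factory):
    """Wrap a generator function and hand back an already-primed generator."""
    @wraps(factory)
    def wrapper(*args, **kwargs):
        gen = factory(*args, **kwargs)
        # consume the sentinel; this executes the body up to the first yield,
        # i.e. after the wrapped WSGI app has been invoked
        first = next(gen)
        assert first == "__init__", 'generator must yield "__init__" first'
        return gen
    return wrapper
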
634 | def _get_repository_name(self, environ): |
|
635 | def _get_repository_name(self, environ): | |
635 | """Get repository name out of the environmnent |
|
636 | """Get repository name out of the environmnent | |
636 |
|
637 | |||
637 | :param environ: WSGI environment |
|
638 | :param environ: WSGI environment | |
638 | """ |
|
639 | """ | |
639 | raise NotImplementedError() |
|
640 | raise NotImplementedError() | |
640 |
|
641 | |||
641 | def _get_action(self, environ): |
|
642 | def _get_action(self, environ): | |
642 | """Map request commands into a pull or push command. |
|
643 | """Map request commands into a pull or push command. | |
643 |
|
644 | |||
644 | :param environ: WSGI environment |
|
645 | :param environ: WSGI environment | |
645 | """ |
|
646 | """ | |
646 | raise NotImplementedError() |
|
647 | raise NotImplementedError() | |
647 |
|
648 | |||
648 | def _create_wsgi_app(self, repo_path, repo_name, config): |
|
649 | def _create_wsgi_app(self, repo_path, repo_name, config): | |
649 | """Return the WSGI app that will finally handle the request.""" |
|
650 | """Return the WSGI app that will finally handle the request.""" | |
650 | raise NotImplementedError() |
|
651 | raise NotImplementedError() | |
651 |
|
652 | |||
652 | def _create_config(self, extras, repo_name, scheme='http'): |
|
653 | def _create_config(self, extras, repo_name, scheme='http'): | |
653 | """Create a safe config representation.""" |
|
654 | """Create a safe config representation.""" | |
654 | raise NotImplementedError() |
|
655 | raise NotImplementedError() | |
655 |
|
656 | |||
656 | def _should_use_callback_daemon(self, extras, environ, action): |
|
657 | def _should_use_callback_daemon(self, extras, environ, action): | |
657 | if extras.get('is_shadow_repo'): |
|
658 | if extras.get('is_shadow_repo'): | |
658 | # we don't want to execute hooks or the callback daemon for shadow repos
|
659 | # we don't want to execute hooks or the callback daemon for shadow repos | |
659 | return False |
|
660 | return False | |
660 | return True |
|
661 | return True | |
661 |
|
662 | |||
662 | def _prepare_callback_daemon(self, extras, environ, action, txn_id=None): |
|
663 | def _prepare_callback_daemon(self, extras, environ, action, txn_id=None): | |
663 | direct_calls = vcs_settings.HOOKS_DIRECT_CALLS |
|
664 | direct_calls = vcs_settings.HOOKS_DIRECT_CALLS | |
664 | if not self._should_use_callback_daemon(extras, environ, action): |
|
665 | if not self._should_use_callback_daemon(extras, environ, action): | |
665 | # disable callback daemon for actions that don't require it |
|
666 | # disable callback daemon for actions that don't require it | |
666 | direct_calls = True |
|
667 | direct_calls = True | |
667 |
|
668 | |||
668 | return prepare_callback_daemon( |
|
669 | return prepare_callback_daemon( | |
669 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, |
|
670 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, | |
670 | host=vcs_settings.HOOKS_HOST, use_direct_calls=direct_calls, txn_id=txn_id) |
|
671 | host=vcs_settings.HOOKS_HOST, use_direct_calls=direct_calls, txn_id=txn_id) | |
671 |
|
672 | |||
672 |
|
673 | |||
673 | def _should_check_locking(query_string): |
|
674 | def _should_check_locking(query_string): | |
674 | # this is kind of hacky, but due to how Mercurial handles the client-server
|
675 | # this is kind of hacky, but due to how Mercurial handles the client-server | |
675 | # protocol, the server sees operations on commits, bookmarks, phases and
|
676 | # protocol, the server sees operations on commits, bookmarks, phases and | |
676 | # obsolescence markers in separate transactions; we don't want to check
|
677 | # obsolescence markers in separate transactions; we don't want to check | |
677 | # locking on those |
|
678 | # locking on those | |
678 | return query_string not in ['cmd=listkeys'] |
|
679 | return query_string not in ['cmd=listkeys'] |
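
# NOTE: the comment above boils down to: only Mercurial's ``listkeys`` wire
# command (bookmark/phase reads) is exempt from the locking check. A small,
# purely illustrative check of the helper's behaviour:
def _locking_check_examples():
    assert _should_check_locking('cmd=unbundle') is True    # pushes are checked
    assert _should_check_locking('cmd=listkeys') is False   # key listing is not
    assert _should_check_locking('') is True
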
@@ -1,913 +1,920 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import os |
|
21 | import os | |
22 | import hashlib |
|
22 | import hashlib | |
23 | import logging |
|
23 | import logging | |
24 | import re |
|
24 | import re | |
25 | from collections import namedtuple |
|
25 | from collections import namedtuple | |
26 | from functools import wraps |
|
26 | from functools import wraps | |
27 | import bleach |
|
27 | import bleach | |
28 | from pyramid.threadlocal import get_current_request |
|
28 | from pyramid.threadlocal import get_current_request, get_current_registry | |
29 |
|
29 | |||
30 | from rhodecode.lib import rc_cache |
|
30 | from rhodecode.lib import rc_cache | |
31 | from rhodecode.lib.utils2 import ( |
|
31 | from rhodecode.lib.utils2 import ( | |
32 | Optional, AttributeDict, safe_str, remove_prefix, str2bool) |
|
32 | Optional, AttributeDict, safe_str, remove_prefix, str2bool) | |
33 | from rhodecode.lib.vcs.backends import base |
|
33 | from rhodecode.lib.vcs.backends import base | |
34 | from rhodecode.model import BaseModel |
|
34 | from rhodecode.model import BaseModel | |
35 | from rhodecode.model.db import ( |
|
35 | from rhodecode.model.db import ( | |
36 | RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting, CacheKey) |
|
36 | RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting, CacheKey) | |
37 | from rhodecode.model.meta import Session |
|
37 | from rhodecode.model.meta import Session | |
38 |
|
38 | |||
39 |
|
39 | |||
40 | log = logging.getLogger(__name__) |
|
40 | log = logging.getLogger(__name__) | |
41 |
|
41 | |||
42 |
|
42 | |||
43 | UiSetting = namedtuple( |
|
43 | UiSetting = namedtuple( | |
44 | 'UiSetting', ['section', 'key', 'value', 'active']) |
|
44 | 'UiSetting', ['section', 'key', 'value', 'active']) | |
45 |
|
45 | |||
46 | SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google'] |
|
46 | SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google'] | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | class SettingNotFound(Exception): |
|
49 | class SettingNotFound(Exception): | |
50 | def __init__(self, setting_id): |
|
50 | def __init__(self, setting_id): | |
51 | msg = 'Setting `{}` is not found'.format(setting_id) |
|
51 | msg = 'Setting `{}` is not found'.format(setting_id) | |
52 | super(SettingNotFound, self).__init__(msg) |
|
52 | super(SettingNotFound, self).__init__(msg) | |
53 |
|
53 | |||
54 |
|
54 | |||
55 | class SettingsModel(BaseModel): |
|
55 | class SettingsModel(BaseModel): | |
56 | BUILTIN_HOOKS = ( |
|
56 | BUILTIN_HOOKS = ( | |
57 | RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH, |
|
57 | RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH, | |
58 | RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH, |
|
58 | RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH, | |
59 | RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL, |
|
59 | RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL, | |
60 | RhodeCodeUi.HOOK_PUSH_KEY,) |
|
60 | RhodeCodeUi.HOOK_PUSH_KEY,) | |
61 | HOOKS_SECTION = 'hooks' |
|
61 | HOOKS_SECTION = 'hooks' | |
62 |
|
62 | |||
63 | def __init__(self, sa=None, repo=None): |
|
63 | def __init__(self, sa=None, repo=None): | |
64 | self.repo = repo |
|
64 | self.repo = repo | |
65 | self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi |
|
65 | self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi | |
66 | self.SettingsDbModel = ( |
|
66 | self.SettingsDbModel = ( | |
67 | RepoRhodeCodeSetting if repo else RhodeCodeSetting) |
|
67 | RepoRhodeCodeSetting if repo else RhodeCodeSetting) | |
68 | super(SettingsModel, self).__init__(sa) |
|
68 | super(SettingsModel, self).__init__(sa) | |
69 |
|
69 | |||
70 | def get_ui_by_key(self, key): |
|
70 | def get_ui_by_key(self, key): | |
71 | q = self.UiDbModel.query() |
|
71 | q = self.UiDbModel.query() | |
72 | q = q.filter(self.UiDbModel.ui_key == key) |
|
72 | q = q.filter(self.UiDbModel.ui_key == key) | |
73 | q = self._filter_by_repo(RepoRhodeCodeUi, q) |
|
73 | q = self._filter_by_repo(RepoRhodeCodeUi, q) | |
74 | return q.scalar() |
|
74 | return q.scalar() | |
75 |
|
75 | |||
76 | def get_ui_by_section(self, section): |
|
76 | def get_ui_by_section(self, section): | |
77 | q = self.UiDbModel.query() |
|
77 | q = self.UiDbModel.query() | |
78 | q = q.filter(self.UiDbModel.ui_section == section) |
|
78 | q = q.filter(self.UiDbModel.ui_section == section) | |
79 | q = self._filter_by_repo(RepoRhodeCodeUi, q) |
|
79 | q = self._filter_by_repo(RepoRhodeCodeUi, q) | |
80 | return q.all() |
|
80 | return q.all() | |
81 |
|
81 | |||
82 | def get_ui_by_section_and_key(self, section, key): |
|
82 | def get_ui_by_section_and_key(self, section, key): | |
83 | q = self.UiDbModel.query() |
|
83 | q = self.UiDbModel.query() | |
84 | q = q.filter(self.UiDbModel.ui_section == section) |
|
84 | q = q.filter(self.UiDbModel.ui_section == section) | |
85 | q = q.filter(self.UiDbModel.ui_key == key) |
|
85 | q = q.filter(self.UiDbModel.ui_key == key) | |
86 | q = self._filter_by_repo(RepoRhodeCodeUi, q) |
|
86 | q = self._filter_by_repo(RepoRhodeCodeUi, q) | |
87 | return q.scalar() |
|
87 | return q.scalar() | |
88 |
|
88 | |||
89 | def get_ui(self, section=None, key=None): |
|
89 | def get_ui(self, section=None, key=None): | |
90 | q = self.UiDbModel.query() |
|
90 | q = self.UiDbModel.query() | |
91 | q = self._filter_by_repo(RepoRhodeCodeUi, q) |
|
91 | q = self._filter_by_repo(RepoRhodeCodeUi, q) | |
92 |
|
92 | |||
93 | if section: |
|
93 | if section: | |
94 | q = q.filter(self.UiDbModel.ui_section == section) |
|
94 | q = q.filter(self.UiDbModel.ui_section == section) | |
95 | if key: |
|
95 | if key: | |
96 | q = q.filter(self.UiDbModel.ui_key == key) |
|
96 | q = q.filter(self.UiDbModel.ui_key == key) | |
97 |
|
97 | |||
98 | # TODO: mikhail: add caching |
|
98 | # TODO: mikhail: add caching | |
99 | result = [ |
|
99 | result = [ | |
100 | UiSetting( |
|
100 | UiSetting( | |
101 | section=safe_str(r.ui_section), key=safe_str(r.ui_key), |
|
101 | section=safe_str(r.ui_section), key=safe_str(r.ui_key), | |
102 | value=safe_str(r.ui_value), active=r.ui_active |
|
102 | value=safe_str(r.ui_value), active=r.ui_active | |
103 | ) |
|
103 | ) | |
104 | for r in q.all() |
|
104 | for r in q.all() | |
105 | ] |
|
105 | ] | |
106 | return result |
|
106 | return result | |
107 |
|
107 | |||
108 | def get_builtin_hooks(self): |
|
108 | def get_builtin_hooks(self): | |
109 | q = self.UiDbModel.query() |
|
109 | q = self.UiDbModel.query() | |
110 | q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS)) |
|
110 | q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS)) | |
111 | return self._get_hooks(q) |
|
111 | return self._get_hooks(q) | |
112 |
|
112 | |||
113 | def get_custom_hooks(self): |
|
113 | def get_custom_hooks(self): | |
114 | q = self.UiDbModel.query() |
|
114 | q = self.UiDbModel.query() | |
115 | q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS)) |
|
115 | q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS)) | |
116 | return self._get_hooks(q) |
|
116 | return self._get_hooks(q) | |
117 |
|
117 | |||
118 | def create_ui_section_value(self, section, val, key=None, active=True): |
|
118 | def create_ui_section_value(self, section, val, key=None, active=True): | |
119 | new_ui = self.UiDbModel() |
|
119 | new_ui = self.UiDbModel() | |
120 | new_ui.ui_section = section |
|
120 | new_ui.ui_section = section | |
121 | new_ui.ui_value = val |
|
121 | new_ui.ui_value = val | |
122 | new_ui.ui_active = active |
|
122 | new_ui.ui_active = active | |
123 |
|
123 | |||
124 | repository_id = '' |
|
124 | repository_id = '' | |
125 | if self.repo: |
|
125 | if self.repo: | |
126 | repo = self._get_repo(self.repo) |
|
126 | repo = self._get_repo(self.repo) | |
127 | repository_id = repo.repo_id |
|
127 | repository_id = repo.repo_id | |
128 | new_ui.repository_id = repository_id |
|
128 | new_ui.repository_id = repository_id | |
129 |
|
129 | |||
130 | if not key: |
|
130 | if not key: | |
131 | # keys are unique, so they need extra info appended
|
131 | # keys are unique, so they need extra info appended | |
132 | if self.repo: |
|
132 | if self.repo: | |
133 | key = hashlib.sha1( |
|
133 | key = hashlib.sha1( | |
134 | '{}{}{}'.format(section, val, repository_id)).hexdigest() |
|
134 | '{}{}{}'.format(section, val, repository_id)).hexdigest() | |
135 | else: |
|
135 | else: | |
136 | key = hashlib.sha1('{}{}'.format(section, val)).hexdigest() |
|
136 | key = hashlib.sha1('{}{}'.format(section, val)).hexdigest() | |
137 |
|
137 | |||
138 | new_ui.ui_key = key |
|
138 | new_ui.ui_key = key | |
139 |
|
139 | |||
140 | Session().add(new_ui) |
|
140 | Session().add(new_ui) | |
141 | return new_ui |
|
141 | return new_ui | |
142 |
|
142 | |||
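
# NOTE: illustration of the auto-generated ui_key described by the comment in
# create_ui_section_value() above: when no explicit key is passed, a sha1 of
# section + value (+ repository id for repo-level entries) keeps keys unique.
# The section/value used here are hypothetical.
def _auto_key_example():
    import hashlib
    section, val = 'hooks', 'python:my_hooks.log_push'
    # stable 40-character hex digest for this section/value pair
    return hashlib.sha1('{}{}'.format(section, val)).hexdigest()
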
143 | def create_or_update_hook(self, key, value): |
|
143 | def create_or_update_hook(self, key, value): | |
144 | ui = ( |
|
144 | ui = ( | |
145 | self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or |
|
145 | self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or | |
146 | self.UiDbModel()) |
|
146 | self.UiDbModel()) | |
147 | ui.ui_section = self.HOOKS_SECTION |
|
147 | ui.ui_section = self.HOOKS_SECTION | |
148 | ui.ui_active = True |
|
148 | ui.ui_active = True | |
149 | ui.ui_key = key |
|
149 | ui.ui_key = key | |
150 | ui.ui_value = value |
|
150 | ui.ui_value = value | |
151 |
|
151 | |||
152 | if self.repo: |
|
152 | if self.repo: | |
153 | repo = self._get_repo(self.repo) |
|
153 | repo = self._get_repo(self.repo) | |
154 | repository_id = repo.repo_id |
|
154 | repository_id = repo.repo_id | |
155 | ui.repository_id = repository_id |
|
155 | ui.repository_id = repository_id | |
156 |
|
156 | |||
157 | Session().add(ui) |
|
157 | Session().add(ui) | |
158 | return ui |
|
158 | return ui | |
159 |
|
159 | |||
160 | def delete_ui(self, id_): |
|
160 | def delete_ui(self, id_): | |
161 | ui = self.UiDbModel.get(id_) |
|
161 | ui = self.UiDbModel.get(id_) | |
162 | if not ui: |
|
162 | if not ui: | |
163 | raise SettingNotFound(id_) |
|
163 | raise SettingNotFound(id_) | |
164 | Session().delete(ui) |
|
164 | Session().delete(ui) | |
165 |
|
165 | |||
166 | def get_setting_by_name(self, name): |
|
166 | def get_setting_by_name(self, name): | |
167 | q = self._get_settings_query() |
|
167 | q = self._get_settings_query() | |
168 | q = q.filter(self.SettingsDbModel.app_settings_name == name) |
|
168 | q = q.filter(self.SettingsDbModel.app_settings_name == name) | |
169 | return q.scalar() |
|
169 | return q.scalar() | |
170 |
|
170 | |||
171 | def create_or_update_setting( |
|
171 | def create_or_update_setting( | |
172 | self, name, val=Optional(''), type_=Optional('unicode')): |
|
172 | self, name, val=Optional(''), type_=Optional('unicode')): | |
173 | """ |
|
173 | """ | |
174 | Creates or updates RhodeCode setting. If an update is triggered it will
|
174 | Creates or updates RhodeCode setting. If an update is triggered it will | |
175 | only update parameters that are explicitly set; Optional instances will
|
175 | only update parameters that are explicitly set; Optional instances will | |
176 | be skipped |
|
176 | be skipped | |
177 |
|
177 | |||
178 | :param name: |
|
178 | :param name: | |
179 | :param val: |
|
179 | :param val: | |
180 | :param type_: |
|
180 | :param type_: | |
181 | :return: |
|
181 | :return: | |
182 | """ |
|
182 | """ | |
183 |
|
183 | |||
184 | res = self.get_setting_by_name(name) |
|
184 | res = self.get_setting_by_name(name) | |
185 | repo = self._get_repo(self.repo) if self.repo else None |
|
185 | repo = self._get_repo(self.repo) if self.repo else None | |
186 |
|
186 | |||
187 | if not res: |
|
187 | if not res: | |
188 | val = Optional.extract(val) |
|
188 | val = Optional.extract(val) | |
189 | type_ = Optional.extract(type_) |
|
189 | type_ = Optional.extract(type_) | |
190 |
|
190 | |||
191 | args = ( |
|
191 | args = ( | |
192 | (repo.repo_id, name, val, type_) |
|
192 | (repo.repo_id, name, val, type_) | |
193 | if repo else (name, val, type_)) |
|
193 | if repo else (name, val, type_)) | |
194 | res = self.SettingsDbModel(*args) |
|
194 | res = self.SettingsDbModel(*args) | |
195 |
|
195 | |||
196 | else: |
|
196 | else: | |
197 | if self.repo: |
|
197 | if self.repo: | |
198 | res.repository_id = repo.repo_id |
|
198 | res.repository_id = repo.repo_id | |
199 |
|
199 | |||
200 | res.app_settings_name = name |
|
200 | res.app_settings_name = name | |
201 | if not isinstance(type_, Optional): |
|
201 | if not isinstance(type_, Optional): | |
202 | # update if set |
|
202 | # update if set | |
203 | res.app_settings_type = type_ |
|
203 | res.app_settings_type = type_ | |
204 | if not isinstance(val, Optional): |
|
204 | if not isinstance(val, Optional): | |
205 | # update if set |
|
205 | # update if set | |
206 | res.app_settings_value = val |
|
206 | res.app_settings_value = val | |
207 |
|
207 | |||
208 | Session().add(res) |
|
208 | Session().add(res) | |
209 | return res |
|
209 | return res | |
210 |
|
210 | |||
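
# NOTE: usage sketch for the Optional-skipping behaviour described in the
# docstring of create_or_update_setting() above; the setting name and values
# are illustrative only.
def _create_or_update_example():
    model = SettingsModel()
    # first call creates the setting (explicit value, default 'unicode' type)
    model.create_or_update_setting('title', 'My RhodeCode Server')
    # second call updates only the value; type_ stays an Optional instance
    # and is therefore left untouched
    model.create_or_update_setting('title', 'Renamed Server')
    Session().commit()
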
211 | def invalidate_settings_cache(self): |
|
211 | def invalidate_settings_cache(self): | |
212 | invalidation_namespace = CacheKey.SETTINGS_INVALIDATION_NAMESPACE |
|
212 | invalidation_namespace = CacheKey.SETTINGS_INVALIDATION_NAMESPACE | |
213 | CacheKey.set_invalidate(invalidation_namespace) |
|
213 | CacheKey.set_invalidate(invalidation_namespace) | |
214 |
|
214 | |||
215 | def get_all_settings(self, cache=False, from_request=True): |
|
215 | def get_all_settings(self, cache=False, from_request=True): | |
|
216 | from rhodecode.authentication.base import get_authn_registry | |||
|
217 | ||||
216 | # defines if we use GLOBAL, or PER_REPO |
|
218 | # defines if we use GLOBAL, or PER_REPO | |
217 | repo = self._get_repo(self.repo) if self.repo else None |
|
219 | repo = self._get_repo(self.repo) if self.repo else None | |
218 | key = "settings_repo.{}".format(repo.repo_id) if repo else "settings_app" |
|
220 | key = "settings_repo.{}".format(repo.repo_id) if repo else "settings_app" | |
219 |
|
221 | |||
220 | # initially try the request context; this is the fastest
|
222 | # initially try the request context; this is the fastest | |
221 | # we only fetch global config |
|
223 | # we only fetch global config | |
222 | if from_request: |
|
224 | if from_request: | |
223 | request = get_current_request() |
|
225 | request = get_current_request() | |
224 |
|
226 | |||
225 | if request and not repo and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'): |
|
227 | if request and not repo and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'): | |
226 | rc_config = request.call_context.rc_config |
|
228 | rc_config = request.call_context.rc_config | |
227 | if rc_config: |
|
229 | if rc_config: | |
228 | return rc_config |
|
230 | return rc_config | |
229 |
|
231 | |||
230 | region = rc_cache.get_or_create_region('sql_cache_short') |
|
232 | region = rc_cache.get_or_create_region('sql_cache_short') | |
231 | invalidation_namespace = CacheKey.SETTINGS_INVALIDATION_NAMESPACE |
|
233 | invalidation_namespace = CacheKey.SETTINGS_INVALIDATION_NAMESPACE | |
232 |
|
234 | |||
233 | @region.conditional_cache_on_arguments(condition=cache) |
|
235 | @region.conditional_cache_on_arguments(condition=cache) | |
234 | def _get_all_settings(name, key): |
|
236 | def _get_all_settings(name, key): | |
235 | q = self._get_settings_query() |
|
237 | q = self._get_settings_query() | |
236 | if not q: |
|
238 | if not q: | |
237 | raise Exception('Could not get application settings !') |
|
239 | raise Exception('Could not get application settings !') | |
238 |
|
240 | |||
239 | settings = { |
|
241 | settings = { | |
240 | 'rhodecode_' + result.app_settings_name: result.app_settings_value |
|
242 | 'rhodecode_' + result.app_settings_name: result.app_settings_value | |
241 | for result in q |
|
243 | for result in q | |
242 | } |
|
244 | } | |
243 | return settings |
|
245 | return settings | |
244 |
|
246 | |||
245 | inv_context_manager = rc_cache.InvalidationContext( |
|
247 | inv_context_manager = rc_cache.InvalidationContext( | |
246 | uid='cache_settings', invalidation_namespace=invalidation_namespace) |
|
248 | uid='cache_settings', invalidation_namespace=invalidation_namespace) | |
247 | with inv_context_manager as invalidation_context: |
|
249 | with inv_context_manager as invalidation_context: | |
248 | # check for stored invalidation signal, and maybe purge the cache |
|
250 | # check for stored invalidation signal, and maybe purge the cache | |
249 | # before computing it again |
|
251 | # before computing it again | |
250 | if invalidation_context.should_invalidate(): |
|
252 | if invalidation_context.should_invalidate(): | |
251 | # NOTE:(marcink) we flush the whole sql_cache_short region, because it |
|
253 | # NOTE:(marcink) we flush the whole sql_cache_short region, because it | |
252 | # reads different settings etc. It's a little too much, but those caches
|
254 | # reads different settings etc. It's a little too much, but those caches | |
253 | # are anyway very short-lived and it's the safest way.
|
255 | # are anyway very short-lived and it's the safest way. | |
254 | region = rc_cache.get_or_create_region('sql_cache_short') |
|
256 | region = rc_cache.get_or_create_region('sql_cache_short') | |
255 | region.invalidate() |
|
257 | region.invalidate() | |
|
258 | registry = get_current_registry() | |||
|
259 | if registry: | |||
|
260 | authn_registry = get_authn_registry(registry) | |||
|
261 | if authn_registry: | |||
|
262 | authn_registry.invalidate_plugins_for_auth() | |||
256 |
|
263 | |||
257 | result = _get_all_settings('rhodecode_settings', key) |
|
264 | result = _get_all_settings('rhodecode_settings', key) | |
258 | log.debug('Fetching app settings for key: %s took: %.4fs', key, |
|
265 | log.debug('Fetching app settings for key: %s took: %.4fs', key, | |
259 | inv_context_manager.compute_time) |
|
266 | inv_context_manager.compute_time) | |
260 |
|
267 | |||
261 | return result |
|
268 | return result | |
262 |
|
269 | |||
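
# NOTE: get_all_settings() above layers three lookups: the per-request
# call_context.rc_config, the dogpile 'sql_cache_short' region guarded by
# conditional_cache_on_arguments(condition=cache), and an InvalidationContext
# that purges the region (and, with this change, the authentication plugin
# registry) when the settings namespace is invalidated. Below is a stripped
# down sketch of the same "conditionally cached fetch" idea, using
# hypothetical helpers that are NOT the rc_cache API.
_SETTINGS_CACHE = {}

def _cached_settings_sketch(key, cache=False, compute=lambda: {}):
    """Return settings for ``key``, memoizing them only when asked to."""
    if not cache:                       # condition=False: always recompute
        return compute()
    if key not in _SETTINGS_CACHE:      # condition=True: compute once per key
        _SETTINGS_CACHE[key] = compute()
    return _SETTINGS_CACHE[key]

def _invalidate_settings_sketch():
    """Rough analogue of CacheKey.set_invalidate() + region.invalidate()."""
    _SETTINGS_CACHE.clear()
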
263 | def get_auth_settings(self): |
|
270 | def get_auth_settings(self): | |
264 | q = self._get_settings_query() |
|
271 | q = self._get_settings_query() | |
265 | q = q.filter( |
|
272 | q = q.filter( | |
266 | self.SettingsDbModel.app_settings_name.startswith('auth_')) |
|
273 | self.SettingsDbModel.app_settings_name.startswith('auth_')) | |
267 | rows = q.all() |
|
274 | rows = q.all() | |
268 | auth_settings = { |
|
275 | auth_settings = { | |
269 | row.app_settings_name: row.app_settings_value for row in rows} |
|
276 | row.app_settings_name: row.app_settings_value for row in rows} | |
270 | return auth_settings |
|
277 | return auth_settings | |
271 |
|
278 | |||
272 | def get_auth_plugins(self): |
|
279 | def get_auth_plugins(self): | |
273 | auth_plugins = self.get_setting_by_name("auth_plugins") |
|
280 | auth_plugins = self.get_setting_by_name("auth_plugins") | |
274 | return auth_plugins.app_settings_value |
|
281 | return auth_plugins.app_settings_value | |
275 |
|
282 | |||
276 | def get_default_repo_settings(self, strip_prefix=False): |
|
283 | def get_default_repo_settings(self, strip_prefix=False): | |
277 | q = self._get_settings_query() |
|
284 | q = self._get_settings_query() | |
278 | q = q.filter( |
|
285 | q = q.filter( | |
279 | self.SettingsDbModel.app_settings_name.startswith('default_')) |
|
286 | self.SettingsDbModel.app_settings_name.startswith('default_')) | |
280 | rows = q.all() |
|
287 | rows = q.all() | |
281 |
|
288 | |||
282 | result = {} |
|
289 | result = {} | |
283 | for row in rows: |
|
290 | for row in rows: | |
284 | key = row.app_settings_name |
|
291 | key = row.app_settings_name | |
285 | if strip_prefix: |
|
292 | if strip_prefix: | |
286 | key = remove_prefix(key, prefix='default_') |
|
293 | key = remove_prefix(key, prefix='default_') | |
287 | result.update({key: row.app_settings_value}) |
|
294 | result.update({key: row.app_settings_value}) | |
288 | return result |
|
295 | return result | |
289 |
|
296 | |||
290 | def get_repo(self): |
|
297 | def get_repo(self): | |
291 | repo = self._get_repo(self.repo) |
|
298 | repo = self._get_repo(self.repo) | |
292 | if not repo: |
|
299 | if not repo: | |
293 | raise Exception( |
|
300 | raise Exception( | |
294 | 'Repository `{}` cannot be found inside the database'.format( |
|
301 | 'Repository `{}` cannot be found inside the database'.format( | |
295 | self.repo)) |
|
302 | self.repo)) | |
296 | return repo |
|
303 | return repo | |
297 |
|
304 | |||
298 | def _filter_by_repo(self, model, query): |
|
305 | def _filter_by_repo(self, model, query): | |
299 | if self.repo: |
|
306 | if self.repo: | |
300 | repo = self.get_repo() |
|
307 | repo = self.get_repo() | |
301 | query = query.filter(model.repository_id == repo.repo_id) |
|
308 | query = query.filter(model.repository_id == repo.repo_id) | |
302 | return query |
|
309 | return query | |
303 |
|
310 | |||
304 | def _get_hooks(self, query): |
|
311 | def _get_hooks(self, query): | |
305 | query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION) |
|
312 | query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION) | |
306 | query = self._filter_by_repo(RepoRhodeCodeUi, query) |
|
313 | query = self._filter_by_repo(RepoRhodeCodeUi, query) | |
307 | return query.all() |
|
314 | return query.all() | |
308 |
|
315 | |||
309 | def _get_settings_query(self): |
|
316 | def _get_settings_query(self): | |
310 | q = self.SettingsDbModel.query() |
|
317 | q = self.SettingsDbModel.query() | |
311 | return self._filter_by_repo(RepoRhodeCodeSetting, q) |
|
318 | return self._filter_by_repo(RepoRhodeCodeSetting, q) | |
312 |
|
319 | |||
313 | def list_enabled_social_plugins(self, settings): |
|
320 | def list_enabled_social_plugins(self, settings): | |
314 | enabled = [] |
|
321 | enabled = [] | |
315 | for plug in SOCIAL_PLUGINS_LIST: |
|
322 | for plug in SOCIAL_PLUGINS_LIST: | |
316 | if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug) |
|
323 | if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug) | |
317 | )): |
|
324 | )): | |
318 | enabled.append(plug) |
|
325 | enabled.append(plug) | |
319 | return enabled |
|
326 | return enabled | |
320 |
|
327 | |||
321 |
|
328 | |||
322 | def assert_repo_settings(func): |
|
329 | def assert_repo_settings(func): | |
323 | @wraps(func) |
|
330 | @wraps(func) | |
324 | def _wrapper(self, *args, **kwargs): |
|
331 | def _wrapper(self, *args, **kwargs): | |
325 | if not self.repo_settings: |
|
332 | if not self.repo_settings: | |
326 | raise Exception('Repository is not specified') |
|
333 | raise Exception('Repository is not specified') | |
327 | return func(self, *args, **kwargs) |
|
334 | return func(self, *args, **kwargs) | |
328 | return _wrapper |
|
335 | return _wrapper | |
329 |
|
336 | |||
330 |
|
337 | |||
331 | class IssueTrackerSettingsModel(object): |
|
338 | class IssueTrackerSettingsModel(object): | |
332 | INHERIT_SETTINGS = 'inherit_issue_tracker_settings' |
|
339 | INHERIT_SETTINGS = 'inherit_issue_tracker_settings' | |
333 | SETTINGS_PREFIX = 'issuetracker_' |
|
340 | SETTINGS_PREFIX = 'issuetracker_' | |
334 |
|
341 | |||
335 | def __init__(self, sa=None, repo=None): |
|
342 | def __init__(self, sa=None, repo=None): | |
336 | self.global_settings = SettingsModel(sa=sa) |
|
343 | self.global_settings = SettingsModel(sa=sa) | |
337 | self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None |
|
344 | self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None | |
338 |
|
345 | |||
339 | @property |
|
346 | @property | |
340 | def inherit_global_settings(self): |
|
347 | def inherit_global_settings(self): | |
341 | if not self.repo_settings: |
|
348 | if not self.repo_settings: | |
342 | return True |
|
349 | return True | |
343 | setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS) |
|
350 | setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS) | |
344 | return setting.app_settings_value if setting else True |
|
351 | return setting.app_settings_value if setting else True | |
345 |
|
352 | |||
346 | @inherit_global_settings.setter |
|
353 | @inherit_global_settings.setter | |
347 | def inherit_global_settings(self, value): |
|
354 | def inherit_global_settings(self, value): | |
348 | if self.repo_settings: |
|
355 | if self.repo_settings: | |
349 | settings = self.repo_settings.create_or_update_setting( |
|
356 | settings = self.repo_settings.create_or_update_setting( | |
350 | self.INHERIT_SETTINGS, value, type_='bool') |
|
357 | self.INHERIT_SETTINGS, value, type_='bool') | |
351 | Session().add(settings) |
|
358 | Session().add(settings) | |
352 |
|
359 | |||
353 | def _get_keyname(self, key, uid, prefix=''): |
|
360 | def _get_keyname(self, key, uid, prefix=''): | |
354 | return '{0}{1}{2}_{3}'.format( |
|
361 | return '{0}{1}{2}_{3}'.format( | |
355 | prefix, self.SETTINGS_PREFIX, key, uid) |
|
362 | prefix, self.SETTINGS_PREFIX, key, uid) | |
356 |
|
363 | |||
357 | def _make_dict_for_settings(self, qs): |
|
364 | def _make_dict_for_settings(self, qs): | |
358 | prefix_match = self._get_keyname('pat', '', 'rhodecode_') |
|
365 | prefix_match = self._get_keyname('pat', '', 'rhodecode_') | |
359 |
|
366 | |||
360 | issuetracker_entries = {} |
|
367 | issuetracker_entries = {} | |
361 | # create keys |
|
368 | # create keys | |
362 | for k, v in qs.items(): |
|
369 | for k, v in qs.items(): | |
363 | if k.startswith(prefix_match): |
|
370 | if k.startswith(prefix_match): | |
364 | uid = k[len(prefix_match):] |
|
371 | uid = k[len(prefix_match):] | |
365 | issuetracker_entries[uid] = None |
|
372 | issuetracker_entries[uid] = None | |
366 |
|
373 | |||
367 | def url_cleaner(input_str): |
|
374 | def url_cleaner(input_str): | |
368 | input_str = input_str.replace('"', '').replace("'", '') |
|
375 | input_str = input_str.replace('"', '').replace("'", '') | |
369 | input_str = bleach.clean(input_str, strip=True) |
|
376 | input_str = bleach.clean(input_str, strip=True) | |
370 | return input_str |
|
377 | return input_str | |
371 |
|
378 | |||
372 | # populate |
|
379 | # populate | |
373 | for uid in issuetracker_entries: |
|
380 | for uid in issuetracker_entries: | |
374 | url_data = qs.get(self._get_keyname('url', uid, 'rhodecode_')) |
|
381 | url_data = qs.get(self._get_keyname('url', uid, 'rhodecode_')) | |
375 |
|
382 | |||
376 | pat = qs.get(self._get_keyname('pat', uid, 'rhodecode_')) |
|
383 | pat = qs.get(self._get_keyname('pat', uid, 'rhodecode_')) | |
377 | try: |
|
384 | try: | |
378 | pat_compiled = re.compile(r'%s' % pat) |
|
385 | pat_compiled = re.compile(r'%s' % pat) | |
379 | except re.error: |
|
386 | except re.error: | |
380 | pat_compiled = None |
|
387 | pat_compiled = None | |
381 |
|
388 | |||
382 | issuetracker_entries[uid] = AttributeDict({ |
|
389 | issuetracker_entries[uid] = AttributeDict({ | |
383 | 'pat': pat, |
|
390 | 'pat': pat, | |
384 | 'pat_compiled': pat_compiled, |
|
391 | 'pat_compiled': pat_compiled, | |
385 | 'url': url_cleaner( |
|
392 | 'url': url_cleaner( | |
386 | qs.get(self._get_keyname('url', uid, 'rhodecode_')) or ''), |
|
393 | qs.get(self._get_keyname('url', uid, 'rhodecode_')) or ''), | |
387 | 'pref': bleach.clean( |
|
394 | 'pref': bleach.clean( | |
388 | qs.get(self._get_keyname('pref', uid, 'rhodecode_')) or ''), |
|
395 | qs.get(self._get_keyname('pref', uid, 'rhodecode_')) or ''), | |
389 | 'desc': qs.get( |
|
396 | 'desc': qs.get( | |
390 | self._get_keyname('desc', uid, 'rhodecode_')), |
|
397 | self._get_keyname('desc', uid, 'rhodecode_')), | |
391 | }) |
|
398 | }) | |
392 |
|
399 | |||
393 | return issuetracker_entries |
|
400 | return issuetracker_entries | |
394 |
|
401 | |||
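
# NOTE: _make_dict_for_settings() above groups the flat settings dict into one
# entry per tracker purely by key naming: each uid contributes
# rhodecode_issuetracker_pat_<uid>, ..._url_<uid>, ..._pref_<uid> and
# ..._desc_<uid> keys. The uid and values below are made up.
def _issuetracker_entries_example():
    qs = {
        'rhodecode_issuetracker_pat_jira': r'(?:^|\s)([A-Z]+-\d+)',
        'rhodecode_issuetracker_url_jira': 'https://jira.example.com/browse/${id}',
        'rhodecode_issuetracker_pref_jira': 'JIRA',
        'rhodecode_issuetracker_desc_jira': 'Example JIRA tracker',
    }
    entries = IssueTrackerSettingsModel()._make_dict_for_settings(qs)
    # entries == {'jira': AttributeDict(pat=..., pat_compiled=<compiled regex>,
    #             url=..., pref='JIRA', desc='Example JIRA tracker')}
    return entries
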
395 | def get_global_settings(self, cache=False): |
|
402 | def get_global_settings(self, cache=False): | |
396 | """ |
|
403 | """ | |
397 | Returns list of global issue tracker settings |
|
404 | Returns list of global issue tracker settings | |
398 | """ |
|
405 | """ | |
399 | defaults = self.global_settings.get_all_settings(cache=cache) |
|
406 | defaults = self.global_settings.get_all_settings(cache=cache) | |
400 | settings = self._make_dict_for_settings(defaults) |
|
407 | settings = self._make_dict_for_settings(defaults) | |
401 | return settings |
|
408 | return settings | |
402 |
|
409 | |||
403 | def get_repo_settings(self, cache=False): |
|
410 | def get_repo_settings(self, cache=False): | |
404 | """ |
|
411 | """ | |
405 | Returns list of issue tracker settings per repository |
|
412 | Returns list of issue tracker settings per repository | |
406 | """ |
|
413 | """ | |
407 | if not self.repo_settings: |
|
414 | if not self.repo_settings: | |
408 | raise Exception('Repository is not specified') |
|
415 | raise Exception('Repository is not specified') | |
409 | all_settings = self.repo_settings.get_all_settings(cache=cache) |
|
416 | all_settings = self.repo_settings.get_all_settings(cache=cache) | |
410 | settings = self._make_dict_for_settings(all_settings) |
|
417 | settings = self._make_dict_for_settings(all_settings) | |
411 | return settings |
|
418 | return settings | |
412 |
|
419 | |||
413 | def get_settings(self, cache=False): |
|
420 | def get_settings(self, cache=False): | |
414 | if self.inherit_global_settings: |
|
421 | if self.inherit_global_settings: | |
415 | return self.get_global_settings(cache=cache) |
|
422 | return self.get_global_settings(cache=cache) | |
416 | else: |
|
423 | else: | |
417 | return self.get_repo_settings(cache=cache) |
|
424 | return self.get_repo_settings(cache=cache) | |
418 |
|
425 | |||
419 | def delete_entries(self, uid): |
|
426 | def delete_entries(self, uid): | |
420 | if self.repo_settings: |
|
427 | if self.repo_settings: | |
421 | all_patterns = self.get_repo_settings() |
|
428 | all_patterns = self.get_repo_settings() | |
422 | settings_model = self.repo_settings |
|
429 | settings_model = self.repo_settings | |
423 | else: |
|
430 | else: | |
424 | all_patterns = self.get_global_settings() |
|
431 | all_patterns = self.get_global_settings() | |
425 | settings_model = self.global_settings |
|
432 | settings_model = self.global_settings | |
426 | entries = all_patterns.get(uid, []) |
|
433 | entries = all_patterns.get(uid, []) | |
427 |
|
434 | |||
428 | for del_key in entries: |
|
435 | for del_key in entries: | |
429 | setting_name = self._get_keyname(del_key, uid) |
|
436 | setting_name = self._get_keyname(del_key, uid) | |
430 | entry = settings_model.get_setting_by_name(setting_name) |
|
437 | entry = settings_model.get_setting_by_name(setting_name) | |
431 | if entry: |
|
438 | if entry: | |
432 | Session().delete(entry) |
|
439 | Session().delete(entry) | |
433 |
|
440 | |||
434 | Session().commit() |
|
441 | Session().commit() | |
435 |
|
442 | |||
436 | def create_or_update_setting( |
|
443 | def create_or_update_setting( | |
437 | self, name, val=Optional(''), type_=Optional('unicode')): |
|
444 | self, name, val=Optional(''), type_=Optional('unicode')): | |
438 | if self.repo_settings: |
|
445 | if self.repo_settings: | |
439 | setting = self.repo_settings.create_or_update_setting( |
|
446 | setting = self.repo_settings.create_or_update_setting( | |
440 | name, val, type_) |
|
447 | name, val, type_) | |
441 | else: |
|
448 | else: | |
442 | setting = self.global_settings.create_or_update_setting( |
|
449 | setting = self.global_settings.create_or_update_setting( | |
443 | name, val, type_) |
|
450 | name, val, type_) | |
444 | return setting |
|
451 | return setting | |
445 |
|
452 | |||
446 |
|
453 | |||
447 | class VcsSettingsModel(object): |
|
454 | class VcsSettingsModel(object): | |
448 |
|
455 | |||
449 | INHERIT_SETTINGS = 'inherit_vcs_settings' |
|
456 | INHERIT_SETTINGS = 'inherit_vcs_settings' | |
450 | GENERAL_SETTINGS = ( |
|
457 | GENERAL_SETTINGS = ( | |
451 | 'use_outdated_comments', |
|
458 | 'use_outdated_comments', | |
452 | 'pr_merge_enabled', |
|
459 | 'pr_merge_enabled', | |
453 | 'hg_use_rebase_for_merging', |
|
460 | 'hg_use_rebase_for_merging', | |
454 | 'hg_close_branch_before_merging', |
|
461 | 'hg_close_branch_before_merging', | |
455 | 'git_use_rebase_for_merging', |
|
462 | 'git_use_rebase_for_merging', | |
456 | 'git_close_branch_before_merging', |
|
463 | 'git_close_branch_before_merging', | |
457 | 'diff_cache', |
|
464 | 'diff_cache', | |
458 | ) |
|
465 | ) | |
459 |
|
466 | |||
460 | HOOKS_SETTINGS = ( |
|
467 | HOOKS_SETTINGS = ( | |
461 | ('hooks', 'changegroup.repo_size'), |
|
468 | ('hooks', 'changegroup.repo_size'), | |
462 | ('hooks', 'changegroup.push_logger'), |
|
469 | ('hooks', 'changegroup.push_logger'), | |
463 | ('hooks', 'outgoing.pull_logger'), |
|
470 | ('hooks', 'outgoing.pull_logger'), | |
464 | ) |
|
471 | ) | |
465 | HG_SETTINGS = ( |
|
472 | HG_SETTINGS = ( | |
466 | ('extensions', 'largefiles'), |
|
473 | ('extensions', 'largefiles'), | |
467 | ('phases', 'publish'), |
|
474 | ('phases', 'publish'), | |
468 | ('extensions', 'evolve'), |
|
475 | ('extensions', 'evolve'), | |
469 | ('extensions', 'topic'), |
|
476 | ('extensions', 'topic'), | |
470 | ('experimental', 'evolution'), |
|
477 | ('experimental', 'evolution'), | |
471 | ('experimental', 'evolution.exchange'), |
|
478 | ('experimental', 'evolution.exchange'), | |
472 | ) |
|
479 | ) | |
473 | GIT_SETTINGS = ( |
|
480 | GIT_SETTINGS = ( | |
474 | ('vcs_git_lfs', 'enabled'), |
|
481 | ('vcs_git_lfs', 'enabled'), | |
475 | ) |
|
482 | ) | |
476 | GLOBAL_HG_SETTINGS = ( |
|
483 | GLOBAL_HG_SETTINGS = ( | |
477 | ('extensions', 'largefiles'), |
|
484 | ('extensions', 'largefiles'), | |
478 | ('largefiles', 'usercache'), |
|
485 | ('largefiles', 'usercache'), | |
479 | ('phases', 'publish'), |
|
486 | ('phases', 'publish'), | |
480 | ('extensions', 'hgsubversion'), |
|
487 | ('extensions', 'hgsubversion'), | |
481 | ('extensions', 'evolve'), |
|
488 | ('extensions', 'evolve'), | |
482 | ('extensions', 'topic'), |
|
489 | ('extensions', 'topic'), | |
483 | ('experimental', 'evolution'), |
|
490 | ('experimental', 'evolution'), | |
484 | ('experimental', 'evolution.exchange'), |
|
491 | ('experimental', 'evolution.exchange'), | |
485 | ) |
|
492 | ) | |
486 |
|
493 | |||
487 | GLOBAL_GIT_SETTINGS = ( |
|
494 | GLOBAL_GIT_SETTINGS = ( | |
488 | ('vcs_git_lfs', 'enabled'), |
|
495 | ('vcs_git_lfs', 'enabled'), | |
489 | ('vcs_git_lfs', 'store_location') |
|
496 | ('vcs_git_lfs', 'store_location') | |
490 | ) |
|
497 | ) | |
491 |
|
498 | |||
492 | GLOBAL_SVN_SETTINGS = ( |
|
499 | GLOBAL_SVN_SETTINGS = ( | |
493 | ('vcs_svn_proxy', 'http_requests_enabled'), |
|
500 | ('vcs_svn_proxy', 'http_requests_enabled'), | |
494 | ('vcs_svn_proxy', 'http_server_url') |
|
501 | ('vcs_svn_proxy', 'http_server_url') | |
495 | ) |
|
502 | ) | |
496 |
|
503 | |||
497 | SVN_BRANCH_SECTION = 'vcs_svn_branch' |
|
504 | SVN_BRANCH_SECTION = 'vcs_svn_branch' | |
498 | SVN_TAG_SECTION = 'vcs_svn_tag' |
|
505 | SVN_TAG_SECTION = 'vcs_svn_tag' | |
499 | SSL_SETTING = ('web', 'push_ssl') |
|
506 | SSL_SETTING = ('web', 'push_ssl') | |
500 | PATH_SETTING = ('paths', '/') |
|
507 | PATH_SETTING = ('paths', '/') | |
501 |
|
508 | |||
502 | def __init__(self, sa=None, repo=None): |
|
509 | def __init__(self, sa=None, repo=None): | |
503 | self.global_settings = SettingsModel(sa=sa) |
|
510 | self.global_settings = SettingsModel(sa=sa) | |
504 | self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None |
|
511 | self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None | |
505 | self._ui_settings = ( |
|
512 | self._ui_settings = ( | |
506 | self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS) |
|
513 | self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS) | |
507 | self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION) |
|
514 | self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION) | |
508 |
|
515 | |||
509 | @property |
|
516 | @property | |
510 | @assert_repo_settings |
|
517 | @assert_repo_settings | |
511 | def inherit_global_settings(self): |
|
518 | def inherit_global_settings(self): | |
512 | setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS) |
|
519 | setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS) | |
513 | return setting.app_settings_value if setting else True |
|
520 | return setting.app_settings_value if setting else True | |
514 |
|
521 | |||
515 | @inherit_global_settings.setter |
|
522 | @inherit_global_settings.setter | |
516 | @assert_repo_settings |
|
523 | @assert_repo_settings | |
517 | def inherit_global_settings(self, value): |
|
524 | def inherit_global_settings(self, value): | |
518 | self.repo_settings.create_or_update_setting( |
|
525 | self.repo_settings.create_or_update_setting( | |
519 | self.INHERIT_SETTINGS, value, type_='bool') |
|
526 | self.INHERIT_SETTINGS, value, type_='bool') | |
520 |
|
527 | |||
521 | def get_global_svn_branch_patterns(self): |
|
528 | def get_global_svn_branch_patterns(self): | |
522 | return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION) |
|
529 | return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION) | |
523 |
|
530 | |||
524 | @assert_repo_settings |
|
531 | @assert_repo_settings | |
525 | def get_repo_svn_branch_patterns(self): |
|
532 | def get_repo_svn_branch_patterns(self): | |
526 | return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION) |
|
533 | return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION) | |
527 |
|
534 | |||
528 | def get_global_svn_tag_patterns(self): |
|
535 | def get_global_svn_tag_patterns(self): | |
529 | return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION) |
|
536 | return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION) | |
530 |
|
537 | |||
531 | @assert_repo_settings |
|
538 | @assert_repo_settings | |
532 | def get_repo_svn_tag_patterns(self): |
|
539 | def get_repo_svn_tag_patterns(self): | |
533 | return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION) |
|
540 | return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION) | |
534 |
|
541 | |||
535 | def get_global_settings(self): |
|
542 | def get_global_settings(self): | |
536 | return self._collect_all_settings(global_=True) |
|
543 | return self._collect_all_settings(global_=True) | |
537 |
|
544 | |||
538 | @assert_repo_settings |
|
545 | @assert_repo_settings | |
539 | def get_repo_settings(self): |
|
546 | def get_repo_settings(self): | |
540 | return self._collect_all_settings(global_=False) |
|
547 | return self._collect_all_settings(global_=False) | |
541 |
|
548 | |||
542 | @assert_repo_settings |
|
549 | @assert_repo_settings | |
543 | def get_repo_settings_inherited(self): |
|
550 | def get_repo_settings_inherited(self): | |
544 | global_settings = self.get_global_settings() |
|
551 | global_settings = self.get_global_settings() | |
545 | global_settings.update(self.get_repo_settings()) |
|
552 | global_settings.update(self.get_repo_settings()) | |
546 | return global_settings |
|
553 | return global_settings | |
547 |
|
554 | |||
548 | @assert_repo_settings |
|
555 | @assert_repo_settings | |
549 | def create_or_update_repo_settings( |
|
556 | def create_or_update_repo_settings( | |
550 | self, data, inherit_global_settings=False): |
|
557 | self, data, inherit_global_settings=False): | |
551 | from rhodecode.model.scm import ScmModel |
|
558 | from rhodecode.model.scm import ScmModel | |
552 |
|
559 | |||
553 | self.inherit_global_settings = inherit_global_settings |
|
560 | self.inherit_global_settings = inherit_global_settings | |
554 |
|
561 | |||
555 | repo = self.repo_settings.get_repo() |
|
562 | repo = self.repo_settings.get_repo() | |
556 | if not inherit_global_settings: |
|
563 | if not inherit_global_settings: | |
557 | if repo.repo_type == 'svn': |
|
564 | if repo.repo_type == 'svn': | |
558 | self.create_repo_svn_settings(data) |
|
565 | self.create_repo_svn_settings(data) | |
559 | else: |
|
566 | else: | |
560 | self.create_or_update_repo_hook_settings(data) |
|
567 | self.create_or_update_repo_hook_settings(data) | |
561 | self.create_or_update_repo_pr_settings(data) |
|
568 | self.create_or_update_repo_pr_settings(data) | |
562 |
|
569 | |||
563 | if repo.repo_type == 'hg': |
|
570 | if repo.repo_type == 'hg': | |
564 | self.create_or_update_repo_hg_settings(data) |
|
571 | self.create_or_update_repo_hg_settings(data) | |
565 |
|
572 | |||
566 | if repo.repo_type == 'git': |
|
573 | if repo.repo_type == 'git': | |
567 | self.create_or_update_repo_git_settings(data) |
|
574 | self.create_or_update_repo_git_settings(data) | |
568 |
|
575 | |||
569 | ScmModel().mark_for_invalidation(repo.repo_name, delete=True) |
|
576 | ScmModel().mark_for_invalidation(repo.repo_name, delete=True) | |
570 |
|
577 | |||
571 | @assert_repo_settings |
|
578 | @assert_repo_settings | |
572 | def create_or_update_repo_hook_settings(self, data): |
|
579 | def create_or_update_repo_hook_settings(self, data): | |
573 | for section, key in self.HOOKS_SETTINGS: |
|
580 | for section, key in self.HOOKS_SETTINGS: | |
574 | data_key = self._get_form_ui_key(section, key) |
|
581 | data_key = self._get_form_ui_key(section, key) | |
575 | if data_key not in data: |
|
582 | if data_key not in data: | |
576 | raise ValueError( |
|
583 | raise ValueError( | |
577 | 'The given data does not contain {} key'.format(data_key)) |
|
584 | 'The given data does not contain {} key'.format(data_key)) | |
578 |
|
585 | |||
579 | active = data.get(data_key) |
|
586 | active = data.get(data_key) | |
580 | repo_setting = self.repo_settings.get_ui_by_section_and_key( |
|
587 | repo_setting = self.repo_settings.get_ui_by_section_and_key( | |
581 | section, key) |
|
588 | section, key) | |
582 | if not repo_setting: |
|
589 | if not repo_setting: | |
583 | global_setting = self.global_settings.\ |
|
590 | global_setting = self.global_settings.\ | |
584 | get_ui_by_section_and_key(section, key) |
|
591 | get_ui_by_section_and_key(section, key) | |
585 | self.repo_settings.create_ui_section_value( |
|
592 | self.repo_settings.create_ui_section_value( | |
586 | section, global_setting.ui_value, key=key, active=active) |
|
593 | section, global_setting.ui_value, key=key, active=active) | |
587 | else: |
|
594 | else: | |
588 | repo_setting.ui_active = active |
|
595 | repo_setting.ui_active = active | |
589 | Session().add(repo_setting) |
|
596 | Session().add(repo_setting) | |
590 |
|
597 | |||
591 | def update_global_hook_settings(self, data): |
|
598 | def update_global_hook_settings(self, data): | |
592 | for section, key in self.HOOKS_SETTINGS: |
|
599 | for section, key in self.HOOKS_SETTINGS: | |
593 | data_key = self._get_form_ui_key(section, key) |
|
600 | data_key = self._get_form_ui_key(section, key) | |
594 | if data_key not in data: |
|
601 | if data_key not in data: | |
595 | raise ValueError( |
|
602 | raise ValueError( | |
596 | 'The given data does not contain {} key'.format(data_key)) |
|
603 | 'The given data does not contain {} key'.format(data_key)) | |
597 | active = data.get(data_key) |
|
604 | active = data.get(data_key) | |
598 | repo_setting = self.global_settings.get_ui_by_section_and_key( |
|
605 | repo_setting = self.global_settings.get_ui_by_section_and_key( | |
599 | section, key) |
|
606 | section, key) | |
600 | repo_setting.ui_active = active |
|
607 | repo_setting.ui_active = active | |
601 | Session().add(repo_setting) |
|
608 | Session().add(repo_setting) | |
602 |
|
609 | |||
603 | @assert_repo_settings |
|
610 | @assert_repo_settings | |
604 | def create_or_update_repo_pr_settings(self, data): |
|
611 | def create_or_update_repo_pr_settings(self, data): | |
605 | return self._create_or_update_general_settings( |
|
612 | return self._create_or_update_general_settings( | |
606 | self.repo_settings, data) |
|
613 | self.repo_settings, data) | |
607 |
|
614 | |||
608 | def create_or_update_global_pr_settings(self, data): |
|
615 | def create_or_update_global_pr_settings(self, data): | |
609 | return self._create_or_update_general_settings( |
|
616 | return self._create_or_update_general_settings( | |
610 | self.global_settings, data) |
|
617 | self.global_settings, data) | |
611 |
|
618 | |||
612 | @assert_repo_settings |
|
619 | @assert_repo_settings | |
613 | def create_repo_svn_settings(self, data): |
|
620 | def create_repo_svn_settings(self, data): | |
614 | return self._create_svn_settings(self.repo_settings, data) |
|
621 | return self._create_svn_settings(self.repo_settings, data) | |
615 |
|
622 | |||
616 | def _set_evolution(self, settings, is_enabled): |
|
623 | def _set_evolution(self, settings, is_enabled): | |
617 | if is_enabled: |
|
624 | if is_enabled: | |
618 | # if evolve is active set evolution=all |
|
625 | # if evolve is active set evolution=all | |
619 |
|
626 | |||
620 | self._create_or_update_ui( |
|
627 | self._create_or_update_ui( | |
621 | settings, *('experimental', 'evolution'), value='all', |
|
628 | settings, *('experimental', 'evolution'), value='all', | |
622 | active=True) |
|
629 | active=True) | |
623 | self._create_or_update_ui( |
|
630 | self._create_or_update_ui( | |
624 | settings, *('experimental', 'evolution.exchange'), value='yes', |
|
631 | settings, *('experimental', 'evolution.exchange'), value='yes', | |
625 | active=True) |
|
632 | active=True) | |
626 | # if evolve is active, enable server-side topic support
|
633 | # if evolve is active, enable server-side topic support | |
627 | self._create_or_update_ui( |
|
634 | self._create_or_update_ui( | |
628 | settings, *('extensions', 'topic'), value='', |
|
635 | settings, *('extensions', 'topic'), value='', | |
629 | active=True) |
|
636 | active=True) | |
630 |
|
637 | |||
631 | else: |
|
638 | else: | |
632 | self._create_or_update_ui( |
|
639 | self._create_or_update_ui( | |
633 | settings, *('experimental', 'evolution'), value='', |
|
640 | settings, *('experimental', 'evolution'), value='', | |
634 | active=False) |
|
641 | active=False) | |
635 | self._create_or_update_ui( |
|
642 | self._create_or_update_ui( | |
636 | settings, *('experimental', 'evolution.exchange'), value='no', |
|
643 | settings, *('experimental', 'evolution.exchange'), value='no', | |
637 | active=False) |
|
644 | active=False) | |
638 | self._create_or_update_ui( |
|
645 | self._create_or_update_ui( | |
639 | settings, *('extensions', 'topic'), value='', |
|
646 | settings, *('extensions', 'topic'), value='', | |
640 | active=False) |
|
647 | active=False) | |
641 |
|
648 | |||
642 | @assert_repo_settings |
|
649 | @assert_repo_settings | |
643 | def create_or_update_repo_hg_settings(self, data): |
|
650 | def create_or_update_repo_hg_settings(self, data): | |
644 | largefiles, phases, evolve = \ |
|
651 | largefiles, phases, evolve = \ | |
645 | self.HG_SETTINGS[:3] |
|
652 | self.HG_SETTINGS[:3] | |
646 | largefiles_key, phases_key, evolve_key = \ |
|
653 | largefiles_key, phases_key, evolve_key = \ | |
647 | self._get_settings_keys(self.HG_SETTINGS[:3], data) |
|
654 | self._get_settings_keys(self.HG_SETTINGS[:3], data) | |
648 |
|
655 | |||
649 | self._create_or_update_ui( |
|
656 | self._create_or_update_ui( | |
650 | self.repo_settings, *largefiles, value='', |
|
657 | self.repo_settings, *largefiles, value='', | |
651 | active=data[largefiles_key]) |
|
658 | active=data[largefiles_key]) | |
652 | self._create_or_update_ui( |
|
659 | self._create_or_update_ui( | |
653 | self.repo_settings, *evolve, value='', |
|
660 | self.repo_settings, *evolve, value='', | |
654 | active=data[evolve_key]) |
|
661 | active=data[evolve_key]) | |
655 | self._set_evolution(self.repo_settings, is_enabled=data[evolve_key]) |
|
662 | self._set_evolution(self.repo_settings, is_enabled=data[evolve_key]) | |
656 |
|
663 | |||
657 | self._create_or_update_ui( |
|
664 | self._create_or_update_ui( | |
658 | self.repo_settings, *phases, value=safe_str(data[phases_key])) |
|
665 | self.repo_settings, *phases, value=safe_str(data[phases_key])) | |
659 |
|
666 | |||
660 | def create_or_update_global_hg_settings(self, data): |
|
667 | def create_or_update_global_hg_settings(self, data): | |
661 | largefiles, largefiles_store, phases, hgsubversion, evolve \ |
|
668 | largefiles, largefiles_store, phases, hgsubversion, evolve \ | |
662 | = self.GLOBAL_HG_SETTINGS[:5] |
|
669 | = self.GLOBAL_HG_SETTINGS[:5] | |
663 | largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \ |
|
670 | largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \ | |
664 | = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:5], data) |
|
671 | = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:5], data) | |
665 |
|
672 | |||
666 | self._create_or_update_ui( |
|
673 | self._create_or_update_ui( | |
667 | self.global_settings, *largefiles, value='', |
|
674 | self.global_settings, *largefiles, value='', | |
668 | active=data[largefiles_key]) |
|
675 | active=data[largefiles_key]) | |
669 | self._create_or_update_ui( |
|
676 | self._create_or_update_ui( | |
670 | self.global_settings, *largefiles_store, value=data[largefiles_store_key]) |
|
677 | self.global_settings, *largefiles_store, value=data[largefiles_store_key]) | |
671 | self._create_or_update_ui( |
|
678 | self._create_or_update_ui( | |
672 | self.global_settings, *phases, value=safe_str(data[phases_key])) |
|
679 | self.global_settings, *phases, value=safe_str(data[phases_key])) | |
673 | self._create_or_update_ui( |
|
680 | self._create_or_update_ui( | |
674 | self.global_settings, *hgsubversion, active=data[subversion_key]) |
|
681 | self.global_settings, *hgsubversion, active=data[subversion_key]) | |
675 | self._create_or_update_ui( |
|
682 | self._create_or_update_ui( | |
676 | self.global_settings, *evolve, value='', |
|
683 | self.global_settings, *evolve, value='', | |
677 | active=data[evolve_key]) |
|
684 | active=data[evolve_key]) | |
678 | self._set_evolution(self.global_settings, is_enabled=data[evolve_key]) |
|
685 | self._set_evolution(self.global_settings, is_enabled=data[evolve_key]) | |
679 |
|
686 | |||
680 | def create_or_update_repo_git_settings(self, data): |
|
687 | def create_or_update_repo_git_settings(self, data): | |
681 | # NOTE(marcink): the trailing comma makes the single-element unpack work properly
|
688 | # NOTE(marcink): the trailing comma makes the single-element unpack work properly | |
682 | lfs_enabled, \ |
|
689 | lfs_enabled, \ | |
683 | = self.GIT_SETTINGS |
|
690 | = self.GIT_SETTINGS | |
684 |
|
691 | |||
685 | lfs_enabled_key, \ |
|
692 | lfs_enabled_key, \ | |
686 | = self._get_settings_keys(self.GIT_SETTINGS, data) |
|
693 | = self._get_settings_keys(self.GIT_SETTINGS, data) | |
687 |
|
694 | |||
688 | self._create_or_update_ui( |
|
695 | self._create_or_update_ui( | |
689 | self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key], |
|
696 | self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key], | |
690 | active=data[lfs_enabled_key]) |
|
697 | active=data[lfs_enabled_key]) | |
691 |
|
698 | |||
692 | def create_or_update_global_git_settings(self, data): |
|
699 | def create_or_update_global_git_settings(self, data): | |
693 | lfs_enabled, lfs_store_location \ |
|
700 | lfs_enabled, lfs_store_location \ | |
694 | = self.GLOBAL_GIT_SETTINGS |
|
701 | = self.GLOBAL_GIT_SETTINGS | |
695 | lfs_enabled_key, lfs_store_location_key \ |
|
702 | lfs_enabled_key, lfs_store_location_key \ | |
696 | = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data) |
|
703 | = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data) | |
697 |
|
704 | |||
698 | self._create_or_update_ui( |
|
705 | self._create_or_update_ui( | |
699 | self.global_settings, *lfs_enabled, value=data[lfs_enabled_key], |
|
706 | self.global_settings, *lfs_enabled, value=data[lfs_enabled_key], | |
700 | active=data[lfs_enabled_key]) |
|
707 | active=data[lfs_enabled_key]) | |
701 | self._create_or_update_ui( |
|
708 | self._create_or_update_ui( | |
702 | self.global_settings, *lfs_store_location, |
|
709 | self.global_settings, *lfs_store_location, | |
703 | value=data[lfs_store_location_key]) |
|
710 | value=data[lfs_store_location_key]) | |
704 |
|
711 | |||
705 | def create_or_update_global_svn_settings(self, data): |
|
712 | def create_or_update_global_svn_settings(self, data): | |
706 | # branch/tags patterns |
|
713 | # branch/tags patterns | |
707 | self._create_svn_settings(self.global_settings, data) |
|
714 | self._create_svn_settings(self.global_settings, data) | |
708 |
|
715 | |||
709 | http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS |
|
716 | http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS | |
710 | http_requests_enabled_key, http_server_url_key = self._get_settings_keys( |
|
717 | http_requests_enabled_key, http_server_url_key = self._get_settings_keys( | |
711 | self.GLOBAL_SVN_SETTINGS, data) |
|
718 | self.GLOBAL_SVN_SETTINGS, data) | |
712 |
|
719 | |||
713 | self._create_or_update_ui( |
|
720 | self._create_or_update_ui( | |
714 | self.global_settings, *http_requests_enabled, |
|
721 | self.global_settings, *http_requests_enabled, | |
715 | value=safe_str(data[http_requests_enabled_key])) |
|
722 | value=safe_str(data[http_requests_enabled_key])) | |
716 | self._create_or_update_ui( |
|
723 | self._create_or_update_ui( | |
717 | self.global_settings, *http_server_url, |
|
724 | self.global_settings, *http_server_url, | |
718 | value=data[http_server_url_key]) |
|
725 | value=data[http_server_url_key]) | |
719 |
|
726 | |||
720 | def update_global_ssl_setting(self, value): |
|
727 | def update_global_ssl_setting(self, value): | |
721 | self._create_or_update_ui( |
|
728 | self._create_or_update_ui( | |
722 | self.global_settings, *self.SSL_SETTING, value=value) |
|
729 | self.global_settings, *self.SSL_SETTING, value=value) | |
723 |
|
730 | |||
724 | def update_global_path_setting(self, value): |
|
731 | def update_global_path_setting(self, value): | |
725 | self._create_or_update_ui( |
|
732 | self._create_or_update_ui( | |
726 | self.global_settings, *self.PATH_SETTING, value=value) |
|
733 | self.global_settings, *self.PATH_SETTING, value=value) | |
727 |
|
734 | |||
728 | @assert_repo_settings |
|
735 | @assert_repo_settings | |
729 | def delete_repo_svn_pattern(self, id_): |
|
736 | def delete_repo_svn_pattern(self, id_): | |
730 | ui = self.repo_settings.UiDbModel.get(id_) |
|
737 | ui = self.repo_settings.UiDbModel.get(id_) | |
731 | if ui and ui.repository.repo_name == self.repo_settings.repo: |
|
738 | if ui and ui.repository.repo_name == self.repo_settings.repo: | |
732 | # only delete if it's the same repo as initialized settings |
|
739 | # only delete if it's the same repo as initialized settings | |
733 | self.repo_settings.delete_ui(id_) |
|
740 | self.repo_settings.delete_ui(id_) | |
734 | else: |
|
741 | else: | |
735 | # raise an error as if this option could not be found
|
742 | # raise an error as if this option could not be found | |
736 | self.repo_settings.delete_ui(-1) |
|
743 | self.repo_settings.delete_ui(-1) | |
737 |
|
744 | |||
738 | def delete_global_svn_pattern(self, id_): |
|
745 | def delete_global_svn_pattern(self, id_): | |
739 | self.global_settings.delete_ui(id_) |
|
746 | self.global_settings.delete_ui(id_) | |
740 |
|
747 | |||
741 | @assert_repo_settings |
|
748 | @assert_repo_settings | |
742 | def get_repo_ui_settings(self, section=None, key=None): |
|
749 | def get_repo_ui_settings(self, section=None, key=None): | |
743 | global_uis = self.global_settings.get_ui(section, key) |
|
750 | global_uis = self.global_settings.get_ui(section, key) | |
744 | repo_uis = self.repo_settings.get_ui(section, key) |
|
751 | repo_uis = self.repo_settings.get_ui(section, key) | |
745 |
|
752 | |||
746 | filtered_repo_uis = self._filter_ui_settings(repo_uis) |
|
753 | filtered_repo_uis = self._filter_ui_settings(repo_uis) | |
747 | filtered_repo_uis_keys = [ |
|
754 | filtered_repo_uis_keys = [ | |
748 | (s.section, s.key) for s in filtered_repo_uis] |
|
755 | (s.section, s.key) for s in filtered_repo_uis] | |
749 |
|
756 | |||
750 | def _is_global_ui_filtered(ui): |
|
757 | def _is_global_ui_filtered(ui): | |
751 | return ( |
|
758 | return ( | |
752 | (ui.section, ui.key) in filtered_repo_uis_keys |
|
759 | (ui.section, ui.key) in filtered_repo_uis_keys | |
753 | or ui.section in self._svn_sections) |
|
760 | or ui.section in self._svn_sections) | |
754 |
|
761 | |||
755 | filtered_global_uis = [ |
|
762 | filtered_global_uis = [ | |
756 | ui for ui in global_uis if not _is_global_ui_filtered(ui)] |
|
763 | ui for ui in global_uis if not _is_global_ui_filtered(ui)] | |
757 |
|
764 | |||
758 | return filtered_global_uis + filtered_repo_uis |
|
765 | return filtered_global_uis + filtered_repo_uis | |
759 |
|
766 | |||
760 | def get_global_ui_settings(self, section=None, key=None): |
|
767 | def get_global_ui_settings(self, section=None, key=None): | |
761 | return self.global_settings.get_ui(section, key) |
|
768 | return self.global_settings.get_ui(section, key) | |
762 |
|
769 | |||
763 | def get_ui_settings_as_config_obj(self, section=None, key=None): |
|
770 | def get_ui_settings_as_config_obj(self, section=None, key=None): | |
764 | config = base.Config() |
|
771 | config = base.Config() | |
765 |
|
772 | |||
766 | ui_settings = self.get_ui_settings(section=section, key=key) |
|
773 | ui_settings = self.get_ui_settings(section=section, key=key) | |
767 |
|
774 | |||
768 | for entry in ui_settings: |
|
775 | for entry in ui_settings: | |
769 | config.set(entry.section, entry.key, entry.value) |
|
776 | config.set(entry.section, entry.key, entry.value) | |
770 |
|
777 | |||
771 | return config |
|
778 | return config | |
772 |
|
779 | |||
773 | def get_ui_settings(self, section=None, key=None): |
|
780 | def get_ui_settings(self, section=None, key=None): | |
774 | if not self.repo_settings or self.inherit_global_settings: |
|
781 | if not self.repo_settings or self.inherit_global_settings: | |
775 | return self.get_global_ui_settings(section, key) |
|
782 | return self.get_global_ui_settings(section, key) | |
776 | else: |
|
783 | else: | |
777 | return self.get_repo_ui_settings(section, key) |
|
784 | return self.get_repo_ui_settings(section, key) | |
778 |
|
785 | |||
779 | def get_svn_patterns(self, section=None): |
|
786 | def get_svn_patterns(self, section=None): | |
780 | if not self.repo_settings: |
|
787 | if not self.repo_settings: | |
781 | return self.get_global_ui_settings(section) |
|
788 | return self.get_global_ui_settings(section) | |
782 | else: |
|
789 | else: | |
783 | return self.get_repo_ui_settings(section) |
|
790 | return self.get_repo_ui_settings(section) | |
784 |
|
791 | |||
785 | @assert_repo_settings |
|
792 | @assert_repo_settings | |
786 | def get_repo_general_settings(self): |
|
793 | def get_repo_general_settings(self): | |
787 | global_settings = self.global_settings.get_all_settings() |
|
794 | global_settings = self.global_settings.get_all_settings() | |
788 | repo_settings = self.repo_settings.get_all_settings() |
|
795 | repo_settings = self.repo_settings.get_all_settings() | |
789 | filtered_repo_settings = self._filter_general_settings(repo_settings) |
|
796 | filtered_repo_settings = self._filter_general_settings(repo_settings) | |
790 | global_settings.update(filtered_repo_settings) |
|
797 | global_settings.update(filtered_repo_settings) | |
791 | return global_settings |
|
798 | return global_settings | |
792 |
|
799 | |||
793 | def get_global_general_settings(self): |
|
800 | def get_global_general_settings(self): | |
794 | return self.global_settings.get_all_settings() |
|
801 | return self.global_settings.get_all_settings() | |
795 |
|
802 | |||
796 | def get_general_settings(self): |
|
803 | def get_general_settings(self): | |
797 | if not self.repo_settings or self.inherit_global_settings: |
|
804 | if not self.repo_settings or self.inherit_global_settings: | |
798 | return self.get_global_general_settings() |
|
805 | return self.get_global_general_settings() | |
799 | else: |
|
806 | else: | |
800 | return self.get_repo_general_settings() |
|
807 | return self.get_repo_general_settings() | |
801 |
|
808 | |||
802 | def get_repos_location(self): |
|
809 | def get_repos_location(self): | |
803 | return self.global_settings.get_ui_by_key('/').ui_value |
|
810 | return self.global_settings.get_ui_by_key('/').ui_value | |
804 |
|
811 | |||
805 | def _filter_ui_settings(self, settings): |
|
812 | def _filter_ui_settings(self, settings): | |
806 | filtered_settings = [ |
|
813 | filtered_settings = [ | |
807 | s for s in settings if self._should_keep_setting(s)] |
|
814 | s for s in settings if self._should_keep_setting(s)] | |
808 | return filtered_settings |
|
815 | return filtered_settings | |
809 |
|
816 | |||
810 | def _should_keep_setting(self, setting): |
|
817 | def _should_keep_setting(self, setting): | |
811 | keep = ( |
|
818 | keep = ( | |
812 | (setting.section, setting.key) in self._ui_settings or |
|
819 | (setting.section, setting.key) in self._ui_settings or | |
813 | setting.section in self._svn_sections) |
|
820 | setting.section in self._svn_sections) | |
814 | return keep |
|
821 | return keep | |
815 |
|
822 | |||
816 | def _filter_general_settings(self, settings): |
|
823 | def _filter_general_settings(self, settings): | |
817 | keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS] |
|
824 | keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS] | |
818 | return { |
|
825 | return { | |
819 | k: settings[k] |
|
826 | k: settings[k] | |
820 | for k in settings if k in keys} |
|
827 | for k in settings if k in keys} | |
821 |
|
828 | |||
822 | def _collect_all_settings(self, global_=False): |
|
829 | def _collect_all_settings(self, global_=False): | |
823 | settings = self.global_settings if global_ else self.repo_settings |
|
830 | settings = self.global_settings if global_ else self.repo_settings | |
824 | result = {} |
|
831 | result = {} | |
825 |
|
832 | |||
826 | for section, key in self._ui_settings: |
|
833 | for section, key in self._ui_settings: | |
827 | ui = settings.get_ui_by_section_and_key(section, key) |
|
834 | ui = settings.get_ui_by_section_and_key(section, key) | |
828 | result_key = self._get_form_ui_key(section, key) |
|
835 | result_key = self._get_form_ui_key(section, key) | |
829 |
|
836 | |||
830 | if ui: |
|
837 | if ui: | |
831 | if section in ('hooks', 'extensions'): |
|
838 | if section in ('hooks', 'extensions'): | |
832 | result[result_key] = ui.ui_active |
|
839 | result[result_key] = ui.ui_active | |
833 | elif result_key in ['vcs_git_lfs_enabled']: |
|
840 | elif result_key in ['vcs_git_lfs_enabled']: | |
834 | result[result_key] = ui.ui_active |
|
841 | result[result_key] = ui.ui_active | |
835 | else: |
|
842 | else: | |
836 | result[result_key] = ui.ui_value |
|
843 | result[result_key] = ui.ui_value | |
837 |
|
844 | |||
838 | for name in self.GENERAL_SETTINGS: |
|
845 | for name in self.GENERAL_SETTINGS: | |
839 | setting = settings.get_setting_by_name(name) |
|
846 | setting = settings.get_setting_by_name(name) | |
840 | if setting: |
|
847 | if setting: | |
841 | result_key = 'rhodecode_{}'.format(name) |
|
848 | result_key = 'rhodecode_{}'.format(name) | |
842 | result[result_key] = setting.app_settings_value |
|
849 | result[result_key] = setting.app_settings_value | |
843 |
|
850 | |||
844 | return result |
|
851 | return result | |
845 |
|
852 | |||
846 | def _get_form_ui_key(self, section, key): |
|
853 | def _get_form_ui_key(self, section, key): | |
847 | return '{section}_{key}'.format( |
|
854 | return '{section}_{key}'.format( | |
848 | section=section, key=key.replace('.', '_')) |
|
855 | section=section, key=key.replace('.', '_')) | |
849 |
|
856 | |||
850 | def _create_or_update_ui( |
|
857 | def _create_or_update_ui( | |
851 | self, settings, section, key, value=None, active=None): |
|
858 | self, settings, section, key, value=None, active=None): | |
852 | ui = settings.get_ui_by_section_and_key(section, key) |
|
859 | ui = settings.get_ui_by_section_and_key(section, key) | |
853 | if not ui: |
|
860 | if not ui: | |
854 | active = True if active is None else active |
|
861 | active = True if active is None else active | |
855 | settings.create_ui_section_value( |
|
862 | settings.create_ui_section_value( | |
856 | section, value, key=key, active=active) |
|
863 | section, value, key=key, active=active) | |
857 | else: |
|
864 | else: | |
858 | if active is not None: |
|
865 | if active is not None: | |
859 | ui.ui_active = active |
|
866 | ui.ui_active = active | |
860 | if value is not None: |
|
867 | if value is not None: | |
861 | ui.ui_value = value |
|
868 | ui.ui_value = value | |
862 | Session().add(ui) |
|
869 | Session().add(ui) | |
863 |
|
870 | |||
864 | def _create_svn_settings(self, settings, data): |
|
871 | def _create_svn_settings(self, settings, data): | |
865 | svn_settings = { |
|
872 | svn_settings = { | |
866 | 'new_svn_branch': self.SVN_BRANCH_SECTION, |
|
873 | 'new_svn_branch': self.SVN_BRANCH_SECTION, | |
867 | 'new_svn_tag': self.SVN_TAG_SECTION |
|
874 | 'new_svn_tag': self.SVN_TAG_SECTION | |
868 | } |
|
875 | } | |
869 | for key in svn_settings: |
|
876 | for key in svn_settings: | |
870 | if data.get(key): |
|
877 | if data.get(key): | |
871 | settings.create_ui_section_value(svn_settings[key], data[key]) |
|
878 | settings.create_ui_section_value(svn_settings[key], data[key]) | |
872 |
|
879 | |||
873 | def _create_or_update_general_settings(self, settings, data): |
|
880 | def _create_or_update_general_settings(self, settings, data): | |
874 | for name in self.GENERAL_SETTINGS: |
|
881 | for name in self.GENERAL_SETTINGS: | |
875 | data_key = 'rhodecode_{}'.format(name) |
|
882 | data_key = 'rhodecode_{}'.format(name) | |
876 | if data_key not in data: |
|
883 | if data_key not in data: | |
877 | raise ValueError( |
|
884 | raise ValueError( | |
878 | 'The given data does not contain {} key'.format(data_key)) |
|
885 | 'The given data does not contain {} key'.format(data_key)) | |
879 | setting = settings.create_or_update_setting( |
|
886 | setting = settings.create_or_update_setting( | |
880 | name, data[data_key], 'bool') |
|
887 | name, data[data_key], 'bool') | |
881 | Session().add(setting) |
|
888 | Session().add(setting) | |
882 |
|
889 | |||
883 | def _get_settings_keys(self, settings, data): |
|
890 | def _get_settings_keys(self, settings, data): | |
884 | data_keys = [self._get_form_ui_key(*s) for s in settings] |
|
891 | data_keys = [self._get_form_ui_key(*s) for s in settings] | |
885 | for data_key in data_keys: |
|
892 | for data_key in data_keys: | |
886 | if data_key not in data: |
|
893 | if data_key not in data: | |
887 | raise ValueError( |
|
894 | raise ValueError( | |
888 | 'The given data does not contain {} key'.format(data_key)) |
|
895 | 'The given data does not contain {} key'.format(data_key)) | |
889 | return data_keys |
|
896 | return data_keys | |
890 |
|
897 | |||
891 | def create_largeobjects_dirs_if_needed(self, repo_store_path): |
|
898 | def create_largeobjects_dirs_if_needed(self, repo_store_path): | |
892 | """ |
|
899 | """ | |
893 | This is subscribed to the `pyramid.events.ApplicationCreated` event. It |
|
900 | This is subscribed to the `pyramid.events.ApplicationCreated` event. It | |
894 | creates the largefiles and LFS store directories if they are missing.
|
901 | creates the largefiles and LFS store directories if they are missing. | |
895 | """ |
|
902 | """ | |
896 |
|
903 | |||
897 | from rhodecode.lib.vcs.backends.hg import largefiles_store |
|
904 | from rhodecode.lib.vcs.backends.hg import largefiles_store | |
898 | from rhodecode.lib.vcs.backends.git import lfs_store |
|
905 | from rhodecode.lib.vcs.backends.git import lfs_store | |
899 |
|
906 | |||
900 | paths = [ |
|
907 | paths = [ | |
901 | largefiles_store(repo_store_path), |
|
908 | largefiles_store(repo_store_path), | |
902 | lfs_store(repo_store_path)] |
|
909 | lfs_store(repo_store_path)] | |
903 |
|
910 | |||
904 | for path in paths: |
|
911 | for path in paths: | |
905 | if os.path.isdir(path): |
|
912 | if os.path.isdir(path): | |
906 | continue |
|
913 | continue | |
907 | if os.path.isfile(path): |
|
914 | if os.path.isfile(path): | |
908 | continue |
|
915 | continue | |
909 | # not a file nor dir, we try to create it |
|
916 | # not a file nor dir, we try to create it | |
910 | try: |
|
917 | try: | |
911 | os.makedirs(path) |
|
918 | os.makedirs(path) | |
912 | except Exception: |
|
919 | except Exception: | |
913 | log.warning('Failed to create largefiles dir: %s', path)
|
920 | log.warning('Failed to create largefiles dir: %s', path)
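
For readers following the settings code above, here is a minimal sketch of the form-data shape that create_or_update_repo_hook_settings() and the other update methods consume. It is not part of the changeset; the (section, key) pairs below are assumed examples only (the real HOOKS_SETTINGS list is defined earlier in the model and is not visible in this hunk), but the key derivation mirrors _get_form_ui_key() exactly as shown above.

    # assumed example pairs, for illustration only
    HOOKS_SETTINGS = [
        ('hooks', 'changegroup.repo_size'),
        ('hooks', 'changegroup.push_logger'),
    ]

    def form_ui_key(section, key):
        # same rule as _get_form_ui_key(): join section and key, dots -> underscores
        return '{section}_{key}'.format(
            section=section, key=key.replace('.', '_'))

    data = dict((form_ui_key(s, k), True) for s, k in HOOKS_SETTINGS)
    # data == {'hooks_changegroup_repo_size': True,
    #          'hooks_changegroup_push_logger': True}
    # create_or_update_repo_hook_settings(data) walks HOOKS_SETTINGS and raises
    # ValueError for any pair whose derived key is missing from the dict.
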
@@ -1,416 +1,417 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Helpers for fixture generation |
|
22 | Helpers for fixture generation | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import os |
|
25 | import os | |
26 | import time |
|
26 | import time | |
27 | import tempfile |
|
27 | import tempfile | |
28 | import shutil |
|
28 | import shutil | |
29 |
|
29 | |||
30 | import configobj |
|
30 | import configobj | |
31 |
|
31 | |||
|
32 | from rhodecode.model.settings import SettingsModel | |||
32 | from rhodecode.tests import * |
|
33 | from rhodecode.tests import * | |
33 | from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist, UserEmailMap |
|
34 | from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist, UserEmailMap | |
34 | from rhodecode.model.meta import Session |
|
35 | from rhodecode.model.meta import Session | |
35 | from rhodecode.model.repo import RepoModel |
|
36 | from rhodecode.model.repo import RepoModel | |
36 | from rhodecode.model.user import UserModel |
|
37 | from rhodecode.model.user import UserModel | |
37 | from rhodecode.model.repo_group import RepoGroupModel |
|
38 | from rhodecode.model.repo_group import RepoGroupModel | |
38 | from rhodecode.model.user_group import UserGroupModel |
|
39 | from rhodecode.model.user_group import UserGroupModel | |
39 | from rhodecode.model.gist import GistModel |
|
40 | from rhodecode.model.gist import GistModel | |
40 | from rhodecode.model.auth_token import AuthTokenModel |
|
41 | from rhodecode.model.auth_token import AuthTokenModel | |
41 | from rhodecode.authentication.plugins.auth_rhodecode import \ |
|
42 | from rhodecode.authentication.plugins.auth_rhodecode import \ | |
42 | RhodeCodeAuthPlugin |
|
43 | RhodeCodeAuthPlugin | |
43 |
|
44 | |||
44 | dn = os.path.dirname |
|
45 | dn = os.path.dirname | |
45 | FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures') |
|
46 | FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures') | |
46 |
|
47 | |||
47 |
|
48 | |||
48 | def error_function(*args, **kwargs): |
|
49 | def error_function(*args, **kwargs): | |
49 | raise Exception('Total Crash !') |
|
50 | raise Exception('Total Crash !') | |
50 |
|
51 | |||
51 |
|
52 | |||
52 | class TestINI(object): |
|
53 | class TestINI(object): | |
53 | """ |
|
54 | """ | |
54 | Allows creating a new test.ini file as a copy of an existing one with
|
55 | Allows creating a new test.ini file as a copy of an existing one with | |
55 | edited data. Example usage::
|
56 | edited data. Example usage:: | |
56 |
|
57 | |||
57 | with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
|
58 | with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path: | |
58 | print('paster server %s' % new_test_ini_path)
|
59 | print('paster server %s' % new_test_ini_path) | |
59 | """ |
|
60 | """ | |
60 |
|
61 | |||
61 | def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT', |
|
62 | def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT', | |
62 | destroy=True, dir=None): |
|
63 | destroy=True, dir=None): | |
63 | self.ini_file_path = ini_file_path |
|
64 | self.ini_file_path = ini_file_path | |
64 | self.ini_params = ini_params |
|
65 | self.ini_params = ini_params | |
65 | self.new_path = None |
|
66 | self.new_path = None | |
66 | self.new_path_prefix = new_file_prefix |
|
67 | self.new_path_prefix = new_file_prefix | |
67 | self._destroy = destroy |
|
68 | self._destroy = destroy | |
68 | self._dir = dir |
|
69 | self._dir = dir | |
69 |
|
70 | |||
70 | def __enter__(self): |
|
71 | def __enter__(self): | |
71 | return self.create() |
|
72 | return self.create() | |
72 |
|
73 | |||
73 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
74 | def __exit__(self, exc_type, exc_val, exc_tb): | |
74 | self.destroy() |
|
75 | self.destroy() | |
75 |
|
76 | |||
76 | def create(self): |
|
77 | def create(self): | |
77 | config = configobj.ConfigObj( |
|
78 | config = configobj.ConfigObj( | |
78 | self.ini_file_path, file_error=True, write_empty_values=True) |
|
79 | self.ini_file_path, file_error=True, write_empty_values=True) | |
79 |
|
80 | |||
80 | for data in self.ini_params: |
|
81 | for data in self.ini_params: | |
81 | section, ini_params = data.items()[0] |
|
82 | section, ini_params = data.items()[0] | |
82 | for key, val in ini_params.items(): |
|
83 | for key, val in ini_params.items(): | |
83 | config[section][key] = val |
|
84 | config[section][key] = val | |
84 | with tempfile.NamedTemporaryFile( |
|
85 | with tempfile.NamedTemporaryFile( | |
85 | prefix=self.new_path_prefix, suffix='.ini', dir=self._dir, |
|
86 | prefix=self.new_path_prefix, suffix='.ini', dir=self._dir, | |
86 | delete=False) as new_ini_file: |
|
87 | delete=False) as new_ini_file: | |
87 | config.write(new_ini_file) |
|
88 | config.write(new_ini_file) | |
88 | self.new_path = new_ini_file.name |
|
89 | self.new_path = new_ini_file.name | |
89 |
|
90 | |||
90 | return self.new_path |
|
91 | return self.new_path | |
91 |
|
92 | |||
92 | def destroy(self): |
|
93 | def destroy(self): | |
93 | if self._destroy: |
|
94 | if self._destroy: | |
94 | os.remove(self.new_path) |
|
95 | os.remove(self.new_path) | |
95 |
|
96 | |||
96 |
|
97 | |||
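
A rough usage sketch of the TestINI helper defined above; the section and key shown are placeholder assumptions, not values taken from this changeset:

    # override one setting in a temporary copy of an existing ini file
    ini_overrides = [{'app:main': {'vcs.start_server': 'false'}}]  # assumed section/key
    with TestINI('test.ini', ini_overrides, new_file_prefix='PYTEST') as new_ini_path:
        # new_ini_path is a temporary copy of the ini; destroy=True (the default)
        # removes it again when the block exits
        print('run the test server against %s' % new_ini_path)
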
97 | class Fixture(object): |
|
98 | class Fixture(object): | |
98 |
|
99 | |||
99 | def anon_access(self, status): |
|
100 | def anon_access(self, status): | |
100 | """ |
|
101 | """ | |
101 | Context process for disabling anonymous access. use like: |
|
102 | Context process for disabling anonymous access. use like: | |
102 | fixture = Fixture() |
|
103 | fixture = Fixture() | |
103 | with fixture.anon_access(False): |
|
104 | with fixture.anon_access(False): | |
104 | #tests |
|
105 | #tests | |
105 |
|
106 | |||
106 | after this block anon access will be set to `not status` |
|
107 | after this block anon access will be set to `not status` | |
107 | """ |
|
108 | """ | |
108 |
|
109 | |||
109 | class context(object): |
|
110 | class context(object): | |
110 | def __enter__(self): |
|
111 | def __enter__(self): | |
111 | anon = User.get_default_user() |
|
112 | anon = User.get_default_user() | |
112 | anon.active = status |
|
113 | anon.active = status | |
113 | Session().add(anon) |
|
114 | Session().add(anon) | |
114 | Session().commit() |
|
115 | Session().commit() | |
115 | time.sleep(1.5) # must sleep for cache (1s to expire) |
|
116 | time.sleep(1.5) # must sleep for cache (1s to expire) | |
116 |
|
117 | |||
117 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
118 | def __exit__(self, exc_type, exc_val, exc_tb): | |
118 | anon = User.get_default_user() |
|
119 | anon = User.get_default_user() | |
119 | anon.active = not status |
|
120 | anon.active = not status | |
120 | Session().add(anon) |
|
121 | Session().add(anon) | |
121 | Session().commit() |
|
122 | Session().commit() | |
122 |
|
123 | |||
123 | return context() |
|
124 | return context() | |
124 |
|
125 | |||
125 | def auth_restriction(self, auth_restriction): |
|
126 | def auth_restriction(self, registry, auth_restriction): | |
126 | """ |
|
127 | """ | |
127 | Context manager for changing the built-in rhodecode plugin auth restrictions.
|
128 | Context manager for changing the built-in rhodecode plugin auth restrictions. | |
128 | Use like: |
|
129 | Use like: | |
129 | fixture = Fixture() |
|
130 | fixture = Fixture() | |
130 | with fixture.auth_restriction('super_admin'): |
|
131 | with fixture.auth_restriction('super_admin'): | |
131 | # tests
|
132 | # tests | |
132 |
|
133 | |||
133 | after this block the auth restriction is removed again
|
134 | after this block the auth restriction is removed again | |
134 | """ |
|
135 | """ | |
135 |
|
136 | |||
136 | class context(object): |
|
137 | class context(object): | |
137 | def _get_pluing(self): |
|
138 | def _get_pluing(self): | |
138 | plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format( |
|
139 | plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid) | |
139 | RhodeCodeAuthPlugin.uid) |
|
|||
140 | plugin = RhodeCodeAuthPlugin(plugin_id) |
|
140 | plugin = RhodeCodeAuthPlugin(plugin_id) | |
141 | return plugin |
|
141 | return plugin | |
142 |
|
142 | |||
143 | def __enter__(self): |
|
143 | def __enter__(self): | |
144 | plugin = self._get_pluing() |
|
144 | plugin = self._get_pluing() | |
145 | plugin.create_or_update_setting( |
|
145 | plugin.create_or_update_setting('auth_restriction', auth_restriction) | |
146 | 'auth_restriction', auth_restriction) |
|
|||
147 | Session().commit() |
|
146 | Session().commit() | |
|
147 | SettingsModel().invalidate_settings_cache() | |||
148 |
|
148 | |||
149 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
149 | def __exit__(self, exc_type, exc_val, exc_tb): | |
150 | plugin = self._get_pluing() |
|
150 | plugin = self._get_pluing() | |
151 | plugin.create_or_update_setting( |
|
151 | plugin.create_or_update_setting( | |
152 | 'auth_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE) |
|
152 | 'auth_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE) | |
153 | Session().commit() |
|
153 | Session().commit() | |
|
154 | SettingsModel().invalidate_settings_cache() | |||
154 |
|
155 | |||
155 | return context() |
|
156 | return context() | |
156 |
|
157 | |||
157 | def scope_restriction(self, scope_restriction): |
|
158 | def scope_restriction(self, registry, scope_restriction): | |
158 | """ |
|
159 | """ | |
159 | Context manager for changing the built-in rhodecode plugin scope restrictions.
|
160 | Context manager for changing the built-in rhodecode plugin scope restrictions. | |
160 | Use like: |
|
161 | Use like: | |
161 | fixture = Fixture() |
|
162 | fixture = Fixture() | |
162 | with fixture.scope_restriction('scope_http'): |
|
163 | with fixture.scope_restriction('scope_http'): | |
163 | # tests
|
164 | # tests | |
164 |
|
165 | |||
165 | after this block the scope restriction is removed again
|
166 | after this block the scope restriction is removed again | |
166 | """ |
|
167 | """ | |
167 |
|
168 | |||
168 | class context(object): |
|
169 | class context(object): | |
169 | def _get_pluing(self): |
|
170 | def _get_pluing(self): | |
170 | plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format( |
|
171 | plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid) | |
171 | RhodeCodeAuthPlugin.uid) |
|
|||
172 | plugin = RhodeCodeAuthPlugin(plugin_id) |
|
172 | plugin = RhodeCodeAuthPlugin(plugin_id) | |
173 | return plugin |
|
173 | return plugin | |
174 |
|
174 | |||
175 | def __enter__(self): |
|
175 | def __enter__(self): | |
176 | plugin = self._get_pluing() |
|
176 | plugin = self._get_pluing() | |
177 | plugin.create_or_update_setting( |
|
177 | plugin.create_or_update_setting('scope_restriction', scope_restriction) | |
178 | 'scope_restriction', scope_restriction) |
|
|||
179 | Session().commit() |
|
178 | Session().commit() | |
|
179 | SettingsModel().invalidate_settings_cache() | |||
180 |
|
180 | |||
181 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
181 | def __exit__(self, exc_type, exc_val, exc_tb): | |
182 | plugin = self._get_pluing() |
|
182 | plugin = self._get_pluing() | |
183 | plugin.create_or_update_setting( |
|
183 | plugin.create_or_update_setting( | |
184 | 'scope_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL) |
|
184 | 'scope_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL) | |
185 | Session().commit() |
|
185 | Session().commit() | |
|
186 | SettingsModel().invalidate_settings_cache() | |||
186 |
|
187 | |||
187 | return context() |
|
188 | return context() | |
188 |
|
189 | |||
189 | def _get_repo_create_params(self, **custom): |
|
190 | def _get_repo_create_params(self, **custom): | |
190 | defs = { |
|
191 | defs = { | |
191 | 'repo_name': None, |
|
192 | 'repo_name': None, | |
192 | 'repo_type': 'hg', |
|
193 | 'repo_type': 'hg', | |
193 | 'clone_uri': '', |
|
194 | 'clone_uri': '', | |
194 | 'push_uri': '', |
|
195 | 'push_uri': '', | |
195 | 'repo_group': '-1', |
|
196 | 'repo_group': '-1', | |
196 | 'repo_description': 'DESC', |
|
197 | 'repo_description': 'DESC', | |
197 | 'repo_private': False, |
|
198 | 'repo_private': False, | |
198 | 'repo_landing_rev': 'rev:tip', |
|
199 | 'repo_landing_rev': 'rev:tip', | |
199 | 'repo_copy_permissions': False, |
|
200 | 'repo_copy_permissions': False, | |
200 | 'repo_state': Repository.STATE_CREATED, |
|
201 | 'repo_state': Repository.STATE_CREATED, | |
201 | } |
|
202 | } | |
202 | defs.update(custom) |
|
203 | defs.update(custom) | |
203 | if 'repo_name_full' not in custom: |
|
204 | if 'repo_name_full' not in custom: | |
204 | defs.update({'repo_name_full': defs['repo_name']}) |
|
205 | defs.update({'repo_name_full': defs['repo_name']}) | |
205 |
|
206 | |||
206 | # fix the repo name if passed as repo_name_full |
|
207 | # fix the repo name if passed as repo_name_full | |
207 | if defs['repo_name']: |
|
208 | if defs['repo_name']: | |
208 | defs['repo_name'] = defs['repo_name'].split('/')[-1] |
|
209 | defs['repo_name'] = defs['repo_name'].split('/')[-1] | |
209 |
|
210 | |||
210 | return defs |
|
211 | return defs | |
211 |
|
212 | |||
212 | def _get_group_create_params(self, **custom): |
|
213 | def _get_group_create_params(self, **custom): | |
213 | defs = { |
|
214 | defs = { | |
214 | 'group_name': None, |
|
215 | 'group_name': None, | |
215 | 'group_description': 'DESC', |
|
216 | 'group_description': 'DESC', | |
216 | 'perm_updates': [], |
|
217 | 'perm_updates': [], | |
217 | 'perm_additions': [], |
|
218 | 'perm_additions': [], | |
218 | 'perm_deletions': [], |
|
219 | 'perm_deletions': [], | |
219 | 'group_parent_id': -1, |
|
220 | 'group_parent_id': -1, | |
220 | 'enable_locking': False, |
|
221 | 'enable_locking': False, | |
221 | 'recursive': False, |
|
222 | 'recursive': False, | |
222 | } |
|
223 | } | |
223 | defs.update(custom) |
|
224 | defs.update(custom) | |
224 |
|
225 | |||
225 | return defs |
|
226 | return defs | |
226 |
|
227 | |||
227 | def _get_user_create_params(self, name, **custom): |
|
228 | def _get_user_create_params(self, name, **custom): | |
228 | defs = { |
|
229 | defs = { | |
229 | 'username': name, |
|
230 | 'username': name, | |
230 | 'password': 'qweqwe', |
|
231 | 'password': 'qweqwe', | |
231 | 'email': '%s+test@rhodecode.org' % name, |
|
232 | 'email': '%s+test@rhodecode.org' % name, | |
232 | 'firstname': 'TestUser', |
|
233 | 'firstname': 'TestUser', | |
233 | 'lastname': 'Test', |
|
234 | 'lastname': 'Test', | |
234 | 'description': 'test description', |
|
235 | 'description': 'test description', | |
235 | 'active': True, |
|
236 | 'active': True, | |
236 | 'admin': False, |
|
237 | 'admin': False, | |
237 | 'extern_type': 'rhodecode', |
|
238 | 'extern_type': 'rhodecode', | |
238 | 'extern_name': None, |
|
239 | 'extern_name': None, | |
239 | } |
|
240 | } | |
240 | defs.update(custom) |
|
241 | defs.update(custom) | |
241 |
|
242 | |||
242 | return defs |
|
243 | return defs | |
243 |
|
244 | |||
244 | def _get_user_group_create_params(self, name, **custom): |
|
245 | def _get_user_group_create_params(self, name, **custom): | |
245 | defs = { |
|
246 | defs = { | |
246 | 'users_group_name': name, |
|
247 | 'users_group_name': name, | |
247 | 'user_group_description': 'DESC', |
|
248 | 'user_group_description': 'DESC', | |
248 | 'users_group_active': True, |
|
249 | 'users_group_active': True, | |
249 | 'user_group_data': {}, |
|
250 | 'user_group_data': {}, | |
250 | } |
|
251 | } | |
251 | defs.update(custom) |
|
252 | defs.update(custom) | |
252 |
|
253 | |||
253 | return defs |
|
254 | return defs | |
254 |
|
255 | |||
255 | def create_repo(self, name, **kwargs): |
|
256 | def create_repo(self, name, **kwargs): | |
256 | repo_group = kwargs.get('repo_group') |
|
257 | repo_group = kwargs.get('repo_group') | |
257 | if isinstance(repo_group, RepoGroup): |
|
258 | if isinstance(repo_group, RepoGroup): | |
258 | kwargs['repo_group'] = repo_group.group_id |
|
259 | kwargs['repo_group'] = repo_group.group_id | |
259 | name = name.split(Repository.NAME_SEP)[-1] |
|
260 | name = name.split(Repository.NAME_SEP)[-1] | |
260 | name = Repository.NAME_SEP.join((repo_group.group_name, name)) |
|
261 | name = Repository.NAME_SEP.join((repo_group.group_name, name)) | |
261 |
|
262 | |||
262 | if 'skip_if_exists' in kwargs: |
|
263 | if 'skip_if_exists' in kwargs: | |
263 | del kwargs['skip_if_exists'] |
|
264 | del kwargs['skip_if_exists'] | |
264 | r = Repository.get_by_repo_name(name) |
|
265 | r = Repository.get_by_repo_name(name) | |
265 | if r: |
|
266 | if r: | |
266 | return r |
|
267 | return r | |
267 |
|
268 | |||
268 | form_data = self._get_repo_create_params(repo_name=name, **kwargs) |
|
269 | form_data = self._get_repo_create_params(repo_name=name, **kwargs) | |
269 | cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
270 | cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
270 | RepoModel().create(form_data, cur_user) |
|
271 | RepoModel().create(form_data, cur_user) | |
271 | Session().commit() |
|
272 | Session().commit() | |
272 | repo = Repository.get_by_repo_name(name) |
|
273 | repo = Repository.get_by_repo_name(name) | |
273 | assert repo |
|
274 | assert repo | |
274 | return repo |
|
275 | return repo | |
275 |
|
276 | |||
276 | def create_fork(self, repo_to_fork, fork_name, **kwargs): |
|
277 | def create_fork(self, repo_to_fork, fork_name, **kwargs): | |
277 | repo_to_fork = Repository.get_by_repo_name(repo_to_fork) |
|
278 | repo_to_fork = Repository.get_by_repo_name(repo_to_fork) | |
278 |
|
279 | |||
279 | form_data = self._get_repo_create_params(repo_name=fork_name, |
|
280 | form_data = self._get_repo_create_params(repo_name=fork_name, | |
280 | fork_parent_id=repo_to_fork.repo_id, |
|
281 | fork_parent_id=repo_to_fork.repo_id, | |
281 | repo_type=repo_to_fork.repo_type, |
|
282 | repo_type=repo_to_fork.repo_type, | |
282 | **kwargs) |
|
283 | **kwargs) | |
283 | #TODO: fix it !! |
|
284 | #TODO: fix it !! | |
284 | form_data['description'] = form_data['repo_description'] |
|
285 | form_data['description'] = form_data['repo_description'] | |
285 | form_data['private'] = form_data['repo_private'] |
|
286 | form_data['private'] = form_data['repo_private'] | |
286 | form_data['landing_rev'] = form_data['repo_landing_rev'] |
|
287 | form_data['landing_rev'] = form_data['repo_landing_rev'] | |
287 |
|
288 | |||
288 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
289 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
289 | RepoModel().create_fork(form_data, cur_user=owner) |
|
290 | RepoModel().create_fork(form_data, cur_user=owner) | |
290 | Session().commit() |
|
291 | Session().commit() | |
291 | r = Repository.get_by_repo_name(fork_name) |
|
292 | r = Repository.get_by_repo_name(fork_name) | |
292 | assert r |
|
293 | assert r | |
293 | return r |
|
294 | return r | |
294 |
|
295 | |||
295 | def destroy_repo(self, repo_name, **kwargs): |
|
296 | def destroy_repo(self, repo_name, **kwargs): | |
296 | RepoModel().delete(repo_name, pull_requests='delete', **kwargs) |
|
297 | RepoModel().delete(repo_name, pull_requests='delete', **kwargs) | |
297 | Session().commit() |
|
298 | Session().commit() | |
298 |
|
299 | |||
299 | def destroy_repo_on_filesystem(self, repo_name): |
|
300 | def destroy_repo_on_filesystem(self, repo_name): | |
300 | rm_path = os.path.join(RepoModel().repos_path, repo_name) |
|
301 | rm_path = os.path.join(RepoModel().repos_path, repo_name) | |
301 | if os.path.isdir(rm_path): |
|
302 | if os.path.isdir(rm_path): | |
302 | shutil.rmtree(rm_path) |
|
303 | shutil.rmtree(rm_path) | |
303 |
|
304 | |||
304 | def create_repo_group(self, name, **kwargs): |
|
305 | def create_repo_group(self, name, **kwargs): | |
305 | if 'skip_if_exists' in kwargs: |
|
306 | if 'skip_if_exists' in kwargs: | |
306 | del kwargs['skip_if_exists'] |
|
307 | del kwargs['skip_if_exists'] | |
307 | gr = RepoGroup.get_by_group_name(group_name=name) |
|
308 | gr = RepoGroup.get_by_group_name(group_name=name) | |
308 | if gr: |
|
309 | if gr: | |
309 | return gr |
|
310 | return gr | |
310 | form_data = self._get_group_create_params(group_name=name, **kwargs) |
|
311 | form_data = self._get_group_create_params(group_name=name, **kwargs) | |
311 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
312 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
312 | gr = RepoGroupModel().create( |
|
313 | gr = RepoGroupModel().create( | |
313 | group_name=form_data['group_name'], |
|
314 | group_name=form_data['group_name'], | |
314 | group_description=form_data['group_name'], |
|
315 | group_description=form_data['group_name'], | |
315 | owner=owner) |
|
316 | owner=owner) | |
316 | Session().commit() |
|
317 | Session().commit() | |
317 | gr = RepoGroup.get_by_group_name(gr.group_name) |
|
318 | gr = RepoGroup.get_by_group_name(gr.group_name) | |
318 | return gr |
|
319 | return gr | |
319 |
|
320 | |||
320 | def destroy_repo_group(self, repogroupid): |
|
321 | def destroy_repo_group(self, repogroupid): | |
321 | RepoGroupModel().delete(repogroupid) |
|
322 | RepoGroupModel().delete(repogroupid) | |
322 | Session().commit() |
|
323 | Session().commit() | |
323 |
|
324 | |||
324 | def create_user(self, name, **kwargs): |
|
325 | def create_user(self, name, **kwargs): | |
325 | if 'skip_if_exists' in kwargs: |
|
326 | if 'skip_if_exists' in kwargs: | |
326 | del kwargs['skip_if_exists'] |
|
327 | del kwargs['skip_if_exists'] | |
327 | user = User.get_by_username(name) |
|
328 | user = User.get_by_username(name) | |
328 | if user: |
|
329 | if user: | |
329 | return user |
|
330 | return user | |
330 | form_data = self._get_user_create_params(name, **kwargs) |
|
331 | form_data = self._get_user_create_params(name, **kwargs) | |
331 | user = UserModel().create(form_data) |
|
332 | user = UserModel().create(form_data) | |
332 |
|
333 | |||
333 | # create token for user |
|
334 | # create token for user | |
334 | AuthTokenModel().create( |
|
335 | AuthTokenModel().create( | |
335 | user=user, description=u'TEST_USER_TOKEN') |
|
336 | user=user, description=u'TEST_USER_TOKEN') | |
336 |
|
337 | |||
337 | Session().commit() |
|
338 | Session().commit() | |
338 | user = User.get_by_username(user.username) |
|
339 | user = User.get_by_username(user.username) | |
339 | return user |
|
340 | return user | |
340 |
|
341 | |||
341 | def destroy_user(self, userid): |
|
342 | def destroy_user(self, userid): | |
342 | UserModel().delete(userid) |
|
343 | UserModel().delete(userid) | |
343 | Session().commit() |
|
344 | Session().commit() | |
344 |
|
345 | |||
345 | def create_additional_user_email(self, user, email): |
|
346 | def create_additional_user_email(self, user, email): | |
346 | uem = UserEmailMap() |
|
347 | uem = UserEmailMap() | |
347 | uem.user = user |
|
348 | uem.user = user | |
348 | uem.email = email |
|
349 | uem.email = email | |
349 | Session().add(uem) |
|
350 | Session().add(uem) | |
350 | return uem |
|
351 | return uem | |
351 |
|
352 | |||
352 | def destroy_users(self, userid_iter): |
|
353 | def destroy_users(self, userid_iter): | |
353 | for user_id in userid_iter: |
|
354 | for user_id in userid_iter: | |
354 | if User.get_by_username(user_id): |
|
355 | if User.get_by_username(user_id): | |
355 | UserModel().delete(user_id) |
|
356 | UserModel().delete(user_id) | |
356 | Session().commit() |
|
357 | Session().commit() | |
357 |
|
358 | |||
358 | def create_user_group(self, name, **kwargs): |
|
359 | def create_user_group(self, name, **kwargs): | |
359 | if 'skip_if_exists' in kwargs: |
|
360 | if 'skip_if_exists' in kwargs: | |
360 | del kwargs['skip_if_exists'] |
|
361 | del kwargs['skip_if_exists'] | |
361 | gr = UserGroup.get_by_group_name(group_name=name) |
|
362 | gr = UserGroup.get_by_group_name(group_name=name) | |
362 | if gr: |
|
363 | if gr: | |
363 | return gr |
|
364 | return gr | |
364 | # map the `active` flag to the real attribute, for API consistency of the fixtures
|
365 | # map the `active` flag to the real attribute, for API consistency of the fixtures | |
365 | if 'active' in kwargs: |
|
366 | if 'active' in kwargs: | |
366 | kwargs['users_group_active'] = kwargs['active'] |
|
367 | kwargs['users_group_active'] = kwargs['active'] | |
367 | del kwargs['active'] |
|
368 | del kwargs['active'] | |
368 | form_data = self._get_user_group_create_params(name, **kwargs) |
|
369 | form_data = self._get_user_group_create_params(name, **kwargs) | |
369 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
370 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
370 | user_group = UserGroupModel().create( |
|
371 | user_group = UserGroupModel().create( | |
371 | name=form_data['users_group_name'], |
|
372 | name=form_data['users_group_name'], | |
372 | description=form_data['user_group_description'], |
|
373 | description=form_data['user_group_description'], | |
373 | owner=owner, active=form_data['users_group_active'], |
|
374 | owner=owner, active=form_data['users_group_active'], | |
374 | group_data=form_data['user_group_data']) |
|
375 | group_data=form_data['user_group_data']) | |
375 | Session().commit() |
|
376 | Session().commit() | |
376 | user_group = UserGroup.get_by_group_name(user_group.users_group_name) |
|
377 | user_group = UserGroup.get_by_group_name(user_group.users_group_name) | |
377 | return user_group |
|
378 | return user_group | |
378 |
|
379 | |||
379 | def destroy_user_group(self, usergroupid): |
|
380 | def destroy_user_group(self, usergroupid): | |
380 | UserGroupModel().delete(user_group=usergroupid, force=True) |
|
381 | UserGroupModel().delete(user_group=usergroupid, force=True) | |
381 | Session().commit() |
|
382 | Session().commit() | |
382 |
|
383 | |||
383 | def create_gist(self, **kwargs): |
|
384 | def create_gist(self, **kwargs): | |
384 | form_data = { |
|
385 | form_data = { | |
385 | 'description': 'new-gist', |
|
386 | 'description': 'new-gist', | |
386 | 'owner': TEST_USER_ADMIN_LOGIN, |
|
387 | 'owner': TEST_USER_ADMIN_LOGIN, | |
387 | 'gist_type': GistModel.cls.GIST_PUBLIC, |
|
388 | 'gist_type': GistModel.cls.GIST_PUBLIC, | |
388 | 'lifetime': -1, |
|
389 | 'lifetime': -1, | |
389 | 'acl_level': Gist.ACL_LEVEL_PUBLIC, |
|
390 | 'acl_level': Gist.ACL_LEVEL_PUBLIC, | |
390 | 'gist_mapping': {'filename1.txt': {'content': 'hello world'},} |
|
391 | 'gist_mapping': {'filename1.txt': {'content': 'hello world'},} | |
391 | } |
|
392 | } | |
392 | form_data.update(kwargs) |
|
393 | form_data.update(kwargs) | |
393 | gist = GistModel().create( |
|
394 | gist = GistModel().create( | |
394 | description=form_data['description'], owner=form_data['owner'], |
|
395 | description=form_data['description'], owner=form_data['owner'], | |
395 | gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'], |
|
396 | gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'], | |
396 | lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level'] |
|
397 | lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level'] | |
397 | ) |
|
398 | ) | |
398 | Session().commit() |
|
399 | Session().commit() | |
399 | return gist |
|
400 | return gist | |
400 |
|
401 | |||
401 | def destroy_gists(self, gistid=None): |
|
402 | def destroy_gists(self, gistid=None): | |
402 | for g in GistModel.cls.get_all(): |
|
403 | for g in GistModel.cls.get_all(): | |
403 | if gistid: |
|
404 | if gistid: | |
404 | if gistid == g.gist_access_id: |
|
405 | if gistid == g.gist_access_id: | |
405 | GistModel().delete(g) |
|
406 | GistModel().delete(g) | |
406 | else: |
|
407 | else: | |
407 | GistModel().delete(g) |
|
408 | GistModel().delete(g) | |
408 | Session().commit() |
|
409 | Session().commit() | |
409 |
|
410 | |||
410 | def load_resource(self, resource_name, strip=False): |
|
411 | def load_resource(self, resource_name, strip=False): | |
411 | with open(os.path.join(FIXTURES, resource_name)) as f: |
|
412 | with open(os.path.join(FIXTURES, resource_name)) as f: | |
412 | source = f.read() |
|
413 | source = f.read() | |
413 | if strip: |
|
414 | if strip: | |
414 | source = source.strip() |
|
415 | source = source.strip() | |
415 |
|
416 | |||
416 | return source |
|
417 | return source |
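
The fixture changes above add a registry argument to auth_restriction() and scope_restriction() and invalidate the settings cache on enter and exit. A rough usage sketch under those assumptions (the test body and the way the Pyramid registry is obtained are illustrative, not code from this changeset):

    fixture = Fixture()

    def test_only_super_admins_can_log_in(baseapp):  # hypothetical test and fixture names
        registry = baseapp.config.registry  # however the Pyramid registry is exposed
        with fixture.auth_restriction(registry, 'super_admin'):
            # inside the block the builtin rhodecode auth plugin only accepts
            # super-admin logins; the added invalidate_settings_cache() calls
            # make the restriction take effect immediately
            pass
        # on __exit__ the restriction is reset to AUTH_RESTRICTION_NONE and the
        # settings cache is flushed again
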
@@ -1,340 +1,342 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | py.test config for the test suite that exercises push/pull operations.
|
22 | py.test config for the test suite that exercises push/pull operations. | |
23 |
|
23 | |||
24 | .. important:: |
|
24 | .. important:: | |
25 |
|
25 | |||
26 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started |
|
26 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started | |
27 | to redirect things to stderr instead of stdout. |
|
27 | to redirect things to stderr instead of stdout. | |
28 | """ |
|
28 | """ | |
29 |
|
29 | |||
30 | import os |
|
30 | import os | |
31 | import tempfile |
|
31 | import tempfile | |
32 | import textwrap |
|
32 | import textwrap | |
33 | import pytest |
|
33 | import pytest | |
34 |
|
34 | |||
35 | from rhodecode import events |
|
35 | from rhodecode import events | |
36 | from rhodecode.model.db import Integration, UserRepoToPerm, Permission, \ |
|
36 | from rhodecode.model.db import Integration, UserRepoToPerm, Permission, \ | |
37 | UserToRepoBranchPermission, User |
|
37 | UserToRepoBranchPermission, User | |
38 | from rhodecode.model.integration import IntegrationModel |
|
38 | from rhodecode.model.integration import IntegrationModel | |
39 | from rhodecode.model.db import Repository |
|
39 | from rhodecode.model.db import Repository | |
40 | from rhodecode.model.meta import Session |
|
40 | from rhodecode.model.meta import Session | |
41 | from rhodecode.model.settings import SettingsModel |
|
41 | from rhodecode.model.settings import SettingsModel | |
42 | from rhodecode.integrations.types.webhook import WebhookIntegrationType |
|
42 | from rhodecode.integrations.types.webhook import WebhookIntegrationType | |
43 |
|
43 | |||
44 | from rhodecode.tests import GIT_REPO, HG_REPO |
|
44 | from rhodecode.tests import GIT_REPO, HG_REPO | |
45 | from rhodecode.tests.fixture import Fixture |
|
45 | from rhodecode.tests.fixture import Fixture | |
46 | from rhodecode.tests.server_utils import RcWebServer |
|
46 | from rhodecode.tests.server_utils import RcWebServer | |
47 |
|
47 | |||
48 | REPO_GROUP = 'a_repo_group' |
|
48 | REPO_GROUP = 'a_repo_group' | |
49 | HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO) |
|
49 | HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO) | |
50 | GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO) |
|
50 | GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO) | |
51 |
|
51 | |||
52 |
|
52 | |||
53 | @pytest.fixture(scope="module")
54 | def rcextensions(request, db_connection, tmpdir_factory):
55 |     """
56 |     Installs a testing rcextensions pack to ensure they work as expected.
57 |     """
58 |     init_content = textwrap.dedent("""
59 |         # Forward import the example rcextensions to make it
60 |         # active for our tests.
61 |         from rhodecode.tests.other.example_rcextensions import *
62 |     """)
63 |
64 |     # Note: rcextensions are looked up based on the path of the ini file
65 |     root_path = tmpdir_factory.getbasetemp()
66 |     rcextensions_path = root_path.join('rcextensions')
67 |     init_path = rcextensions_path.join('__init__.py')
68 |
69 |     if rcextensions_path.check():
70 |         pytest.fail(
71 |             "Path for rcextensions already exists, please clean up before "
72 |             "test run this path: %s" % (rcextensions_path, ))
73 |         return
74 |
75 |     request.addfinalizer(rcextensions_path.remove)
76 |     init_path.write_binary(init_content, ensure=True)
77 |
78 |
79 | @pytest.fixture(scope="module")
80 | def repos(request, db_connection):
81 |     """Create a copy of each test repo in a repo group."""
82 |     fixture = Fixture()
83 |     repo_group = fixture.create_repo_group(REPO_GROUP)
84 |     repo_group_id = repo_group.group_id
85 |     fixture.create_fork(HG_REPO, HG_REPO,
86 |                         repo_name_full=HG_REPO_WITH_GROUP,
87 |                         repo_group=repo_group_id)
88 |     fixture.create_fork(GIT_REPO, GIT_REPO,
89 |                         repo_name_full=GIT_REPO_WITH_GROUP,
90 |                         repo_group=repo_group_id)
91 |
92 |     @request.addfinalizer
93 |     def cleanup():
94 |         fixture.destroy_repo(HG_REPO_WITH_GROUP)
95 |         fixture.destroy_repo(GIT_REPO_WITH_GROUP)
96 |         fixture.destroy_repo_group(repo_group_id)
97 |
98 |
99 | @pytest.fixture(scope="module")
100 | def rc_web_server_config_modification():
101 |     return []
102 |
103 |
104 | @pytest.fixture(scope="module")
105 | def rc_web_server_config_factory(testini_factory, rc_web_server_config_modification):
106 |     """
107 |     Configuration file used for the fixture `rc_web_server`.
108 |     """
109 |
110 |     def factory(rcweb_port, vcsserver_port):
111 |         custom_params = [
112 |             {'handler_console': {'level': 'DEBUG'}},
113 |             {'server:main': {'port': rcweb_port}},
114 |             {'app:main': {'vcs.server': 'localhost:%s' % vcsserver_port}}
115 |         ]
116 |         custom_params.extend(rc_web_server_config_modification)
117 |         return testini_factory(custom_params)
118 |     return factory
119 |
120 |
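Because `rc_web_server_config_modification` is a module-scoped fixture returning a plain list, a test module can shadow it to append extra .ini overrides before `rc_web_server_config_factory` builds the config. A minimal sketch, with an illustrative section/option pair:

import pytest

@pytest.fixture(scope="module")
def rc_web_server_config_modification():
    # extra {'section': {'option': value}} entries appended to custom_params
    return [
        {'app:main': {'lock_ret_code': '423'}},  # option name shown as an example only
    ]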
121 | @pytest.fixture(scope="module")
122 | def rc_web_server(
123 |         request, vcsserver_factory, available_port_factory,
124 |         rc_web_server_config_factory, repos, rcextensions):
125 |     """
126 |     Run the web server as a subprocess, with its own instance of vcsserver.
127 |     """
128 |     rcweb_port = available_port_factory()
129 |     print('Using rcweb ops test port {}'.format(rcweb_port))
130 |
131 |     vcsserver_port = available_port_factory()
132 |     print('Using vcsserver ops test port {}'.format(vcsserver_port))
133 |
134 |     vcs_log = os.path.join(tempfile.gettempdir(), 'rc_op_vcs.log')
135 |     vcsserver_factory(
136 |         request, vcsserver_port=vcsserver_port,
137 |         log_file=vcs_log,
138 |         overrides=(
139 |             {'server:main': {'workers': 2}},
140 |             {'server:main': {'graceful_timeout': 10}},
141 |         ))
142 |
143 |     rc_log = os.path.join(tempfile.gettempdir(), 'rc_op_web.log')
144 |     rc_web_server_config = rc_web_server_config_factory(
145 |         rcweb_port=rcweb_port,
146 |         vcsserver_port=vcsserver_port)
147 |     server = RcWebServer(rc_web_server_config, log_file=rc_log)
148 |     server.start()
149 |
150 |     @request.addfinalizer
151 |     def cleanup():
152 |         server.shutdown()
153 |
154 |     server.wait_until_ready()
155 |     return server
156 |
157 |
158 | @pytest.fixture()
159 | def disable_locking(baseapp):
160 |     r = Repository.get_by_repo_name(GIT_REPO)
161 |     Repository.unlock(r)
162 |     r.enable_locking = False
163 |     Session().add(r)
164 |     Session().commit()
165 |
166 |     r = Repository.get_by_repo_name(HG_REPO)
167 |     Repository.unlock(r)
168 |     r.enable_locking = False
169 |     Session().add(r)
170 |     Session().commit()
171 |
172 |
173 | @pytest.fixture()
174 | def enable_auth_plugins(request, baseapp, csrf_token):
175 |     """
176 |     Return a factory object that, when called, allows controlling which
177 |     authentication plugins are enabled.
178 |     """
179 |     def _enable_plugins(plugins_list, override=None):
180 |         override = override or {}
181 |         params = {
182 |             'auth_plugins': ','.join(plugins_list),
183 |         }
184 |
185 |         # helper to translate some names to others
186 |         name_map = {
187 |             'token': 'authtoken'
188 |         }
189 |
190 |         for module in plugins_list:
191 |             plugin_name = module.partition('#')[-1]
192 |             if plugin_name in name_map:
193 |                 plugin_name = name_map[plugin_name]
194 |             enabled_plugin = 'auth_%s_enabled' % plugin_name
195 |             cache_ttl = 'auth_%s_cache_ttl' % plugin_name
196 |
197 |             # default params that are needed for each plugin,
198 |             # `enabled` and `cache_ttl`
199 |             params.update({
200 |                 enabled_plugin: True,
201 |                 cache_ttl: 0
202 |             })
203 |             if override.get:
204 |                 params.update(override.get(module, {}))
205 |
206 |             validated_params = params
207 |             for k, v in validated_params.items():
208 |                 setting = SettingsModel().create_or_update_setting(k, v)
209 |                 Session().add(setting)
210 |             Session().commit()
211 |
212 |         SettingsModel().invalidate_settings_cache()
213 |
214 |     def cleanup():
215 |         _enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode'])
216 |
217 |     request.addfinalizer(cleanup)
218 |
219 |     return _enable_plugins
220 |
221 |
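A test then calls the returned factory with fully qualified plugin ids; a minimal sketch, assuming the standard built-in plugin names:

def test_enables_token_auth(enable_auth_plugins):
    # enable token auth in front of the regular rhodecode plugin;
    # the fixture's finalizer restores the default plugin set afterwards
    enable_auth_plugins([
        'egg:rhodecode-enterprise-ce#token',
        'egg:rhodecode-enterprise-ce#rhodecode',
    ])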
222 | @pytest.fixture()
223 | def fs_repo_only(request, rhodecode_fixtures):
224 |     def fs_repo_fabric(repo_name, repo_type):
225 |         rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type)
226 |         rhodecode_fixtures.destroy_repo(repo_name, fs_remove=False)
227 |
228 |         def cleanup():
229 |             rhodecode_fixtures.destroy_repo(repo_name, fs_remove=True)
230 |             rhodecode_fixtures.destroy_repo_on_filesystem(repo_name)
231 |
232 |         request.addfinalizer(cleanup)
233 |
234 |     return fs_repo_fabric
235 |
236 |
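`fs_repo_only` hands the test a fabric function that registers a repo and then drops its database record while leaving the directory on disk. A usage sketch (the repo name is illustrative):

def test_filesystem_only_repo(fs_repo_only):
    # after this call the repo exists on the filesystem only;
    # the fixture's cleanup removes both the DB entry and the directory
    fs_repo_only('vcs_test_hg_fs_only', repo_type='hg')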
237 | @pytest.fixture()
238 | def enable_webhook_push_integration(request):
239 |     integration = Integration()
240 |     integration.integration_type = WebhookIntegrationType.key
241 |     Session().add(integration)
242 |
243 |     settings = dict(
244 |         url='http://httpbin.org/post',
245 |         secret_token='secret',
246 |         username=None,
247 |         password=None,
248 |         custom_header_key=None,
249 |         custom_header_val=None,
250 |         method_type='post',
251 |         events=[events.RepoPushEvent.name],
252 |         log_data=True
253 |     )
254 |
255 |     IntegrationModel().update_integration(
256 |         integration,
257 |         name='IntegrationWebhookTest',
258 |         enabled=True,
259 |         settings=settings,
260 |         repo=None,
261 |         repo_group=None,
262 |         child_repos_only=False,
263 |     )
264 |     Session().commit()
265 |     integration_id = integration.integration_id
266 |
267 |     @request.addfinalizer
268 |     def cleanup():
269 |         integration = Integration.get(integration_id)
270 |         Session().delete(integration)
271 |         Session().commit()
272 |
273 |
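Push tests opt into the webhook integration simply by requesting the fixture; a sketch of how it might combine with the module-scoped web server (the clone-URL helper on `RcWebServer` is an assumption, not shown in this hunk):

from rhodecode.tests import HG_REPO

def test_push_triggers_webhook(rc_web_server, enable_webhook_push_integration):
    # requesting enable_webhook_push_integration is enough: the integration is
    # created before the test and deleted again by the fixture's finalizer
    clone_url = rc_web_server.repo_clone_url(HG_REPO)  # helper assumed to exist
    # ... clone, commit and push against clone_url, then assert on the webhook log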
274 | @pytest.fixture()
275 | def branch_permission_setter(request):
276 |     """
277 |
278 |     def my_test(branch_permission_setter):
279 |         branch_permission_setter(repo_name, username, pattern='*', permission='branch.push')
280 |
281 |     """
282 |
283 |     rule_id = None
284 |     write_perm_id = None
285 |     write_perm = None
286 |     rule = None
287 |
288 |     def _branch_permissions_setter(
289 |             repo_name, username, pattern='*', permission='branch.push_force'):
290 |         global rule_id, write_perm_id
291 |         global rule, write_perm
292 |
293 |         repo = Repository.get_by_repo_name(repo_name)
294 |         repo_id = repo.repo_id
295 |
296 |         user = User.get_by_username(username)
297 |         user_id = user.user_id
298 |
299 |         rule_perm_obj = Permission.get_by_key(permission)
300 |
301 |         # add new entry, based on existing perm entry
302 |         perm = UserRepoToPerm.query() \
303 |             .filter(UserRepoToPerm.repository_id == repo_id) \
304 |             .filter(UserRepoToPerm.user_id == user_id) \
305 |             .first()
306 |
307 |         if not perm:
308 |             # such user isn't defined in Permissions for repository
309 |             # we now on-the-fly add new permission
310 |
311 |             write_perm = UserRepoToPerm()
312 |             write_perm.permission = Permission.get_by_key('repository.write')
313 |             write_perm.repository_id = repo_id
314 |             write_perm.user_id = user_id
315 |             Session().add(write_perm)
316 |             Session().flush()
317 |
318 |             perm = write_perm
319 |
320 |         rule = UserToRepoBranchPermission()
321 |         rule.rule_to_perm_id = perm.repo_to_perm_id
322 |         rule.branch_pattern = pattern
323 |         rule.rule_order = 10
324 |         rule.permission = rule_perm_obj
325 |         rule.repository_id = repo_id
326 |         Session().add(rule)
327 |         Session().commit()
328 |
329 |         return rule
330 |
331 |     @request.addfinalizer
332 |     def cleanup():
333 |         if rule:
334 |             Session().delete(rule)
335 |             Session().commit()
336 |         if write_perm:
337 |             Session().delete(write_perm)
338 |             Session().commit()
339 |
340 |     return _branch_permissions_setter
341 |
342 |
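As the docstring above indicates, the returned setter is called directly inside a test; a minimal sketch granting forced-push on all branches of the test repo to the admin test user (the user and repo names are illustrative):

from rhodecode.tests import HG_REPO, TEST_USER_ADMIN_LOGIN

def test_force_push_allowed(rc_web_server, branch_permission_setter):
    # allow forced pushes to every branch of the test repo for this user;
    # the fixture's finalizer removes the rule and any added write permission
    branch_permission_setter(
        HG_REPO, TEST_USER_ADMIN_LOGIN,
        pattern='*', permission='branch.push_force')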